diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 0000000..bb49308 --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,39 @@ +name: CI + +on: + push: + branches: [master, rc/*] + pull_request: + branches: [master, rc/*] + +jobs: + test: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + + - name: Set up Go + uses: actions/setup-go@v5 + with: + go-version: '1.21' + + - name: Run tests + run: | + cd codeagent-wrapper + go test -v -cover -coverprofile=coverage.out ./... + + - name: Check coverage + run: | + cd codeagent-wrapper + go tool cover -func=coverage.out | grep total | awk '{print $3}' + + - name: Validate JSON configs + run: | + jq empty .claude/settings.json + jq empty .claude/skills/skill-rules.json + + - name: Upload coverage + uses: codecov/codecov-action@v4 + with: + file: codeagent-wrapper/coverage.out + continue-on-error: true diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 262f01b..8f774ea 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -1,4 +1,4 @@ -name: Release codex-wrapper +name: Release codeagent-wrapper on: push: @@ -22,11 +22,11 @@ jobs: go-version: '1.21' - name: Run tests - working-directory: codex-wrapper + working-directory: codeagent-wrapper run: go test -v -coverprofile=cover.out ./... 
- name: Check coverage - working-directory: codex-wrapper + working-directory: codeagent-wrapper run: | go tool cover -func=cover.out | grep total COVERAGE=$(go tool cover -func=cover.out | grep total | awk '{print $3}' | sed 's/%//') @@ -58,22 +58,22 @@ jobs: go-version: '1.21' - name: Build binary - working-directory: codex-wrapper + working-directory: codeagent-wrapper env: GOOS: ${{ matrix.goos }} GOARCH: ${{ matrix.goarch }} CGO_ENABLED: 0 run: | VERSION=${GITHUB_REF#refs/tags/} - OUTPUT_NAME=codex-wrapper-${{ matrix.goos }}-${{ matrix.goarch }} + OUTPUT_NAME=codeagent-wrapper-${{ matrix.goos }}-${{ matrix.goarch }} go build -ldflags="-s -w -X main.version=${VERSION}" -o ${OUTPUT_NAME} . chmod +x ${OUTPUT_NAME} - name: Upload artifact uses: actions/upload-artifact@v4 with: - name: codex-wrapper-${{ matrix.goos }}-${{ matrix.goarch }} - path: codex-wrapper/codex-wrapper-${{ matrix.goos }}-${{ matrix.goarch }} + name: codeagent-wrapper-${{ matrix.goos }}-${{ matrix.goarch }} + path: codeagent-wrapper/codeagent-wrapper-${{ matrix.goos }}-${{ matrix.goarch }} release: name: Create Release @@ -91,7 +91,7 @@ jobs: - name: Prepare release files run: | mkdir -p release - find artifacts -type f -name "codex-wrapper-*" -exec mv {} release/ \; + find artifacts -type f -name "codeagent-wrapper-*" -exec mv {} release/ \; cp install.sh release/ ls -la release/ diff --git a/README.md b/README.md index 8ad17f6..2422751 100644 --- a/README.md +++ b/README.md @@ -122,6 +122,12 @@ Requirements → Architecture → Sprint Plan → Development → Review → QA **Best For:** Quick tasks, no workflow overhead needed +## Enterprise Workflow Features + +- **Multi-backend execution:** `codeagent-wrapper --backend codex|claude|gemini` (default `codex`) so you can match the model to the task without changing workflows. +- **GitHub workflow commands:** `/gh-create-issue "short need"` creates structured issues; `/gh-implement 123` pulls issue #123, drives development, and prepares the PR. 
+- **Skills + hooks activation:** .claude/hooks run automation (tests, reviews), while `.claude/skills/skill-rules.json` auto-suggests the right skills. Keep hooks enabled in `.claude/settings.json` to activate the enterprise workflow helpers. + --- ## Installation @@ -204,7 +210,7 @@ The `codex` skill enables Claude Code to delegate code execution to Codex CLI. ```bash # Codex is invoked via the skill -codex-wrapper - <<'EOF' +codeagent-wrapper - <<'EOF' implement @src/auth.ts with JWT validation EOF ``` @@ -212,7 +218,7 @@ EOF ### Parallel Execution ```bash -codex-wrapper --parallel <<'EOF' +codeagent-wrapper --parallel <<'EOF' ---TASK--- id: backend_api workdir: /project/backend @@ -282,6 +288,20 @@ python3 install.py --module dev --force --- +## Documentation + +### Core Guides +- **[Architecture Overview](docs/architecture.md)** - System architecture and component design +- **[Codeagent-Wrapper Guide](docs/CODEAGENT-WRAPPER.md)** - Multi-backend execution wrapper +- **[GitHub Workflow Guide](docs/GITHUB-WORKFLOW.md)** - Issue-to-PR automation +- **[Hooks Documentation](docs/HOOKS.md)** - Custom hooks and automation + +### Additional Resources +- **[Enterprise Workflow Ideas](docs/enterprise-workflow-ideas.md)** - Advanced patterns and best practices +- **[Installation Log](install.log)** - Installation history and troubleshooting + +--- + ## License MIT License - see [LICENSE](LICENSE) diff --git a/README_CN.md b/README_CN.md index 1196fe0..9c6089c 100644 --- a/README_CN.md +++ b/README_CN.md @@ -201,7 +201,7 @@ python3 install.py --force ```bash # 通过技能调用 Codex -codex-wrapper - <<'EOF' +codeagent-wrapper - <<'EOF' 在 @src/auth.ts 中实现 JWT 验证 EOF ``` @@ -209,7 +209,7 @@ EOF ### 并行执行 ```bash -codex-wrapper --parallel <<'EOF' +codeagent-wrapper --parallel <<'EOF' ---TASK--- id: backend_api workdir: /project/backend diff --git a/codeagent-wrapper/backend.go b/codeagent-wrapper/backend.go new file mode 100644 index 0000000..59b8f5e --- /dev/null +++ 
b/codeagent-wrapper/backend.go @@ -0,0 +1,54 @@ +package main + +// Backend defines the contract for invoking different AI CLI backends. +// Each backend is responsible for supplying the executable command and +// building the argument list based on the wrapper config. +type Backend interface { + Name() string + BuildArgs(cfg *Config, targetArg string) []string + Command() string +} + +type CodexBackend struct{} + +func (CodexBackend) Name() string { return "codex" } +func (CodexBackend) Command() string { + return "codex" +} +func (CodexBackend) BuildArgs(cfg *Config, targetArg string) []string { + return buildCodexArgs(cfg, targetArg) +} + +type ClaudeBackend struct{} + +func (ClaudeBackend) Name() string { return "claude" } +func (ClaudeBackend) Command() string { + return "claude" +} +func (ClaudeBackend) BuildArgs(cfg *Config, targetArg string) []string { + if cfg == nil { + return nil + } + // claude -p --dangerously-skip-permissions --output-format stream-json --verbose + args := []string{ + "-p", + "--dangerously-skip-permissions", + "--output-format", "stream-json", + "--verbose", + } + return append(args, targetArg) +} + +type GeminiBackend struct{} + +func (GeminiBackend) Name() string { return "gemini" } +func (GeminiBackend) Command() string { + return "gemini" +} +func (GeminiBackend) BuildArgs(cfg *Config, targetArg string) []string { + if cfg == nil { + return nil + } + // gemini -o stream-json -y -p + return []string{"-o", "stream-json", "-y", "-p", targetArg} +} diff --git a/codex-wrapper/bench_test.go b/codeagent-wrapper/bench_test.go similarity index 100% rename from codex-wrapper/bench_test.go rename to codeagent-wrapper/bench_test.go diff --git a/codeagent-wrapper/codeagent-wrapper b/codeagent-wrapper/codeagent-wrapper new file mode 100755 index 0000000..1f58d91 Binary files /dev/null and b/codeagent-wrapper/codeagent-wrapper differ diff --git a/codex-wrapper/concurrent_stress_test.go b/codeagent-wrapper/concurrent_stress_test.go similarity 
index 100% rename from codex-wrapper/concurrent_stress_test.go rename to codeagent-wrapper/concurrent_stress_test.go diff --git a/codeagent-wrapper/config.go b/codeagent-wrapper/config.go new file mode 100644 index 0000000..d64cd5c --- /dev/null +++ b/codeagent-wrapper/config.go @@ -0,0 +1,197 @@ +package main + +import ( + "bytes" + "fmt" + "os" + "strings" +) + +// Config holds CLI configuration +type Config struct { + Mode string // "new" or "resume" + Task string + SessionID string + WorkDir string + ExplicitStdin bool + Timeout int + Backend string +} + +// ParallelConfig defines the JSON schema for parallel execution +type ParallelConfig struct { + Tasks []TaskSpec `json:"tasks"` +} + +// TaskSpec describes an individual task entry in the parallel config +type TaskSpec struct { + ID string `json:"id"` + Task string `json:"task"` + WorkDir string `json:"workdir,omitempty"` + Dependencies []string `json:"dependencies,omitempty"` + SessionID string `json:"session_id,omitempty"` + Mode string `json:"-"` + UseStdin bool `json:"-"` +} + +// TaskResult captures the execution outcome of a task +type TaskResult struct { + TaskID string `json:"task_id"` + ExitCode int `json:"exit_code"` + Message string `json:"message"` + SessionID string `json:"session_id"` + Error string `json:"error"` +} + +var backendRegistry = map[string]Backend{ + "codex": CodexBackend{}, + "claude": ClaudeBackend{}, + "gemini": GeminiBackend{}, +} + +func selectBackend(name string) (Backend, error) { + key := strings.ToLower(strings.TrimSpace(name)) + if key == "" { + key = defaultBackendName + } + if backend, ok := backendRegistry[key]; ok { + return backend, nil + } + return nil, fmt.Errorf("unsupported backend %q", name) +} + +func parseParallelConfig(data []byte) (*ParallelConfig, error) { + trimmed := bytes.TrimSpace(data) + if len(trimmed) == 0 { + return nil, fmt.Errorf("parallel config is empty") + } + + tasks := strings.Split(string(trimmed), "---TASK---") + var cfg ParallelConfig + 
seen := make(map[string]struct{}) + + for _, taskBlock := range tasks { + taskBlock = strings.TrimSpace(taskBlock) + if taskBlock == "" { + continue + } + + parts := strings.SplitN(taskBlock, "---CONTENT---", 2) + if len(parts) != 2 { + return nil, fmt.Errorf("task block missing ---CONTENT--- separator") + } + + meta := strings.TrimSpace(parts[0]) + content := strings.TrimSpace(parts[1]) + + task := TaskSpec{WorkDir: defaultWorkdir} + for _, line := range strings.Split(meta, "\n") { + line = strings.TrimSpace(line) + if line == "" { + continue + } + kv := strings.SplitN(line, ":", 2) + if len(kv) != 2 { + continue + } + key := strings.TrimSpace(kv[0]) + value := strings.TrimSpace(kv[1]) + + switch key { + case "id": + task.ID = value + case "workdir": + task.WorkDir = value + case "session_id": + task.SessionID = value + task.Mode = "resume" + case "dependencies": + for _, dep := range strings.Split(value, ",") { + dep = strings.TrimSpace(dep) + if dep != "" { + task.Dependencies = append(task.Dependencies, dep) + } + } + } + } + + if task.ID == "" { + return nil, fmt.Errorf("task missing id field") + } + if content == "" { + return nil, fmt.Errorf("task %q missing content", task.ID) + } + if _, exists := seen[task.ID]; exists { + return nil, fmt.Errorf("duplicate task id: %s", task.ID) + } + + task.Task = content + cfg.Tasks = append(cfg.Tasks, task) + seen[task.ID] = struct{}{} + } + + if len(cfg.Tasks) == 0 { + return nil, fmt.Errorf("no tasks found") + } + + return &cfg, nil +} + +func parseArgs() (*Config, error) { + args := os.Args[1:] + if len(args) == 0 { + return nil, fmt.Errorf("task required") + } + + backendName := defaultBackendName + filtered := make([]string, 0, len(args)) + for i := 0; i < len(args); i++ { + arg := args[i] + switch { + case arg == "--backend": + if i+1 >= len(args) { + return nil, fmt.Errorf("--backend flag requires a value") + } + backendName = args[i+1] + i++ + continue + case strings.HasPrefix(arg, "--backend="): + value := 
strings.TrimPrefix(arg, "--backend=") + if value == "" { + return nil, fmt.Errorf("--backend flag requires a value") + } + backendName = value + continue + } + filtered = append(filtered, arg) + } + + if len(filtered) == 0 { + return nil, fmt.Errorf("task required") + } + args = filtered + + cfg := &Config{WorkDir: defaultWorkdir, Backend: backendName} + + if args[0] == "resume" { + if len(args) < 3 { + return nil, fmt.Errorf("resume mode requires: resume ") + } + cfg.Mode = "resume" + cfg.SessionID = args[1] + cfg.Task = args[2] + cfg.ExplicitStdin = (args[2] == "-") + if len(args) > 3 { + cfg.WorkDir = args[3] + } + } else { + cfg.Mode = "new" + cfg.Task = args[0] + cfg.ExplicitStdin = (args[0] == "-") + if len(args) > 1 { + cfg.WorkDir = args[1] + } + } + + return cfg, nil +} diff --git a/codeagent-wrapper/coverage.out b/codeagent-wrapper/coverage.out new file mode 100644 index 0000000..e494cd3 --- /dev/null +++ b/codeagent-wrapper/coverage.out @@ -0,0 +1,508 @@ +mode: set +codeagent-wrapper/backend.go:14.35,14.53 1 1 +codeagent-wrapper/backend.go:15.38,17.2 1 1 +codeagent-wrapper/backend.go:18.71,20.2 1 1 +codeagent-wrapper/backend.go:24.36,24.55 1 1 +codeagent-wrapper/backend.go:25.39,27.2 1 1 +codeagent-wrapper/backend.go:28.72,29.16 1 1 +codeagent-wrapper/backend.go:29.16,31.3 1 1 +codeagent-wrapper/backend.go:33.2,33.26 1 1 +codeagent-wrapper/backend.go:33.26,41.3 1 1 +codeagent-wrapper/backend.go:43.2,47.56 2 1 +codeagent-wrapper/backend.go:47.56,49.3 1 1 +codeagent-wrapper/backend.go:50.2,50.32 1 1 +codeagent-wrapper/backend.go:55.36,55.55 1 1 +codeagent-wrapper/backend.go:56.39,58.2 1 1 +codeagent-wrapper/backend.go:59.72,60.40 1 1 +codeagent-wrapper/backend.go:60.40,62.3 1 1 +codeagent-wrapper/backend.go:63.2,63.28 1 1 +codeagent-wrapper/logger.go:37.35,39.2 1 1 +codeagent-wrapper/logger.go:43.58,45.18 2 1 +codeagent-wrapper/logger.go:45.18,47.3 1 1 +codeagent-wrapper/logger.go:48.2,53.16 4 1 +codeagent-wrapper/logger.go:53.16,55.3 1 0 
+codeagent-wrapper/logger.go:57.2,69.15 4 1 +codeagent-wrapper/logger.go:73.32,74.14 1 1 +codeagent-wrapper/logger.go:74.14,76.3 1 1 +codeagent-wrapper/logger.go:77.2,77.15 1 1 +codeagent-wrapper/logger.go:81.35,81.57 1 1 +codeagent-wrapper/logger.go:84.35,84.57 1 1 +codeagent-wrapper/logger.go:87.36,87.59 1 1 +codeagent-wrapper/logger.go:90.36,90.59 1 1 +codeagent-wrapper/logger.go:96.32,97.14 1 1 +codeagent-wrapper/logger.go:97.14,99.3 1 0 +codeagent-wrapper/logger.go:101.2,103.24 2 1 +codeagent-wrapper/logger.go:103.24,110.13 5 1 +codeagent-wrapper/logger.go:110.13,113.4 2 1 +codeagent-wrapper/logger.go:115.3,115.10 1 1 +codeagent-wrapper/logger.go:116.21,116.21 0 1 +codeagent-wrapper/logger.go:118.38,120.63 1 0 +codeagent-wrapper/logger.go:123.3,123.61 1 1 +codeagent-wrapper/logger.go:123.61,125.4 1 0 +codeagent-wrapper/logger.go:127.3,127.58 1 1 +codeagent-wrapper/logger.go:127.58,129.4 1 0 +codeagent-wrapper/logger.go:131.3,131.59 1 1 +codeagent-wrapper/logger.go:131.59,133.4 1 0 +codeagent-wrapper/logger.go:139.2,139.17 1 1 +codeagent-wrapper/logger.go:143.40,144.14 1 1 +codeagent-wrapper/logger.go:144.14,146.3 1 1 +codeagent-wrapper/logger.go:147.2,147.26 1 1 +codeagent-wrapper/logger.go:152.26,153.14 1 1 +codeagent-wrapper/logger.go:153.14,155.3 1 0 +codeagent-wrapper/logger.go:158.2,159.12 2 1 +codeagent-wrapper/logger.go:159.12,162.3 2 1 +codeagent-wrapper/logger.go:164.2,167.9 3 1 +codeagent-wrapper/logger.go:168.14,168.14 0 1 +codeagent-wrapper/logger.go:170.20,172.9 1 0 +codeagent-wrapper/logger.go:176.2,177.9 2 1 +codeagent-wrapper/logger.go:178.31,180.10 1 1 +codeagent-wrapper/logger.go:181.20,181.20 0 1 +codeagent-wrapper/logger.go:183.38,183.38 0 0 +codeagent-wrapper/logger.go:186.16,186.16 0 0 +codeagent-wrapper/logger.go:188.37,188.37 0 0 +codeagent-wrapper/logger.go:193.41,194.14 1 1 +codeagent-wrapper/logger.go:194.14,196.3 1 0 +codeagent-wrapper/logger.go:197.2,197.21 1 1 +codeagent-wrapper/logger.go:197.21,199.3 1 1 
+codeagent-wrapper/logger.go:201.2,204.9 3 1 +codeagent-wrapper/logger.go:205.21,205.21 0 1 +codeagent-wrapper/logger.go:207.16,210.9 2 1 +codeagent-wrapper/logger.go:214.24,220.6 4 1 +codeagent-wrapper/logger.go:220.6,221.10 1 1 +codeagent-wrapper/logger.go:222.28,223.11 1 1 +codeagent-wrapper/logger.go:223.11,227.5 2 1 +codeagent-wrapper/logger.go:228.4,231.22 4 1 +codeagent-wrapper/logger.go:233.19,234.20 1 0 +codeagent-wrapper/logger.go:236.34,240.20 3 1 +codeagent-wrapper/main.go:65.50,67.15 2 1 +codeagent-wrapper/main.go:67.15,69.3 1 1 +codeagent-wrapper/main.go:70.2,70.45 1 1 +codeagent-wrapper/main.go:70.45,72.3 1 1 +codeagent-wrapper/main.go:73.2,73.56 1 1 +codeagent-wrapper/main.go:101.64,103.23 2 1 +codeagent-wrapper/main.go:103.23,105.3 1 0 +codeagent-wrapper/main.go:107.2,111.34 4 1 +codeagent-wrapper/main.go:111.34,113.22 2 1 +codeagent-wrapper/main.go:113.22,114.12 1 1 +codeagent-wrapper/main.go:117.3,118.22 2 1 +codeagent-wrapper/main.go:118.22,120.4 1 1 +codeagent-wrapper/main.go:122.3,126.50 4 1 +codeagent-wrapper/main.go:126.50,128.18 2 1 +codeagent-wrapper/main.go:128.18,129.13 1 1 +codeagent-wrapper/main.go:131.4,132.20 2 1 +codeagent-wrapper/main.go:132.20,133.13 1 0 +codeagent-wrapper/main.go:135.4,138.15 3 1 +codeagent-wrapper/main.go:139.14,140.20 1 1 +codeagent-wrapper/main.go:141.19,142.25 1 1 +codeagent-wrapper/main.go:143.22,145.25 2 0 +codeagent-wrapper/main.go:146.24,147.51 1 1 +codeagent-wrapper/main.go:147.51,149.19 2 1 +codeagent-wrapper/main.go:149.19,151.7 1 1 +codeagent-wrapper/main.go:156.3,156.20 1 1 +codeagent-wrapper/main.go:156.20,158.4 1 1 +codeagent-wrapper/main.go:159.3,159.20 1 1 +codeagent-wrapper/main.go:159.20,161.4 1 1 +codeagent-wrapper/main.go:162.3,162.41 1 1 +codeagent-wrapper/main.go:162.41,164.4 1 1 +codeagent-wrapper/main.go:166.3,168.29 3 1 +codeagent-wrapper/main.go:171.2,171.25 1 1 +codeagent-wrapper/main.go:171.25,173.3 1 0 +codeagent-wrapper/main.go:175.2,175.18 1 1 
+codeagent-wrapper/main.go:178.62,183.29 4 1 +codeagent-wrapper/main.go:183.29,186.3 2 1 +codeagent-wrapper/main.go:188.2,188.29 1 1 +codeagent-wrapper/main.go:188.29,189.41 1 1 +codeagent-wrapper/main.go:189.41,190.35 1 1 +codeagent-wrapper/main.go:190.35,192.5 1 1 +codeagent-wrapper/main.go:193.4,194.40 2 1 +codeagent-wrapper/main.go:198.2,199.29 2 1 +codeagent-wrapper/main.go:199.29,200.29 1 1 +codeagent-wrapper/main.go:200.29,202.4 1 1 +codeagent-wrapper/main.go:205.2,208.21 3 1 +codeagent-wrapper/main.go:208.21,212.30 4 1 +codeagent-wrapper/main.go:212.30,215.4 2 1 +codeagent-wrapper/main.go:216.3,219.30 3 1 +codeagent-wrapper/main.go:219.30,220.37 1 1 +codeagent-wrapper/main.go:220.37,222.32 2 1 +codeagent-wrapper/main.go:222.32,224.6 1 1 +codeagent-wrapper/main.go:227.3,227.33 1 1 +codeagent-wrapper/main.go:230.2,230.29 1 1 +codeagent-wrapper/main.go:230.29,232.33 2 1 +codeagent-wrapper/main.go:232.33,233.15 1 1 +codeagent-wrapper/main.go:233.15,235.5 1 1 +codeagent-wrapper/main.go:237.3,238.92 2 1 +codeagent-wrapper/main.go:241.2,241.20 1 1 +codeagent-wrapper/main.go:244.66,245.24 1 0 +codeagent-wrapper/main.go:245.24,247.3 1 0 +codeagent-wrapper/main.go:248.2,248.21 1 0 +codeagent-wrapper/main.go:248.21,250.3 1 0 +codeagent-wrapper/main.go:251.2,251.55 1 0 +codeagent-wrapper/main.go:251.55,253.3 1 0 +codeagent-wrapper/main.go:255.2,255.42 1 0 +codeagent-wrapper/main.go:258.71,260.31 2 1 +codeagent-wrapper/main.go:260.31,262.3 1 1 +codeagent-wrapper/main.go:264.2,268.31 4 1 +codeagent-wrapper/main.go:268.31,272.30 3 1 +codeagent-wrapper/main.go:272.30,273.58 1 1 +codeagent-wrapper/main.go:273.58,277.13 4 1 +codeagent-wrapper/main.go:280.4,282.25 3 1 +codeagent-wrapper/main.go:282.25,284.18 2 1 +codeagent-wrapper/main.go:284.18,285.34 1 1 +codeagent-wrapper/main.go:285.34,287.7 1 1 +codeagent-wrapper/main.go:289.5,289.45 1 1 +codeagent-wrapper/main.go:293.3,295.33 2 1 +codeagent-wrapper/main.go:295.33,298.44 3 1 +codeagent-wrapper/main.go:298.44,300.5 1 1 
+codeagent-wrapper/main.go:304.2,304.16 1 1 +codeagent-wrapper/main.go:307.81,308.33 1 1 +codeagent-wrapper/main.go:308.33,310.3 1 1 +codeagent-wrapper/main.go:312.2,313.40 2 1 +codeagent-wrapper/main.go:313.40,314.31 1 1 +codeagent-wrapper/main.go:314.31,316.4 1 1 +codeagent-wrapper/main.go:319.2,319.23 1 1 +codeagent-wrapper/main.go:319.23,321.3 1 1 +codeagent-wrapper/main.go:323.2,323.96 1 1 +codeagent-wrapper/main.go:326.55,331.30 4 1 +codeagent-wrapper/main.go:331.30,332.43 1 1 +codeagent-wrapper/main.go:332.43,334.4 1 1 +codeagent-wrapper/main.go:334.9,336.4 1 1 +codeagent-wrapper/main.go:339.2,342.30 3 1 +codeagent-wrapper/main.go:342.30,344.22 2 1 +codeagent-wrapper/main.go:344.22,346.4 1 1 +codeagent-wrapper/main.go:346.9,346.31 1 1 +codeagent-wrapper/main.go:346.31,348.4 1 0 +codeagent-wrapper/main.go:348.9,350.4 1 1 +codeagent-wrapper/main.go:351.3,351.26 1 1 +codeagent-wrapper/main.go:351.26,353.4 1 0 +codeagent-wrapper/main.go:354.3,354.24 1 1 +codeagent-wrapper/main.go:354.24,356.4 1 1 +codeagent-wrapper/main.go:357.3,357.23 1 1 +codeagent-wrapper/main.go:360.2,360.20 1 1 +codeagent-wrapper/main.go:376.13,379.2 2 0 +codeagent-wrapper/main.go:382.27,384.22 1 1 +codeagent-wrapper/main.go:384.22,385.21 1 1 +codeagent-wrapper/main.go:386.26,388.12 2 1 +codeagent-wrapper/main.go:389.23,391.12 2 1 +codeagent-wrapper/main.go:396.2,397.16 2 1 +codeagent-wrapper/main.go:397.16,400.3 2 0 +codeagent-wrapper/main.go:401.2,403.15 2 1 +codeagent-wrapper/main.go:403.15,405.20 2 1 +codeagent-wrapper/main.go:405.20,407.4 1 1 +codeagent-wrapper/main.go:408.3,408.39 1 1 +codeagent-wrapper/main.go:408.39,410.4 1 0 +codeagent-wrapper/main.go:412.3,412.20 1 1 +codeagent-wrapper/main.go:412.20,413.72 1 1 +codeagent-wrapper/main.go:413.73,415.5 0 0 +codeagent-wrapper/main.go:418.2,421.22 2 1 +codeagent-wrapper/main.go:421.22,422.21 1 1 +codeagent-wrapper/main.go:423.21,424.24 1 1 +codeagent-wrapper/main.go:424.24,431.5 6 0 +codeagent-wrapper/main.go:432.4,433.18 2 1 
+codeagent-wrapper/main.go:433.18,436.5 2 0 +codeagent-wrapper/main.go:438.4,439.18 2 1 +codeagent-wrapper/main.go:439.18,442.5 2 0 +codeagent-wrapper/main.go:444.4,446.18 3 1 +codeagent-wrapper/main.go:446.18,449.5 2 1 +codeagent-wrapper/main.go:451.4,455.32 4 1 +codeagent-wrapper/main.go:455.32,456.26 1 1 +codeagent-wrapper/main.go:456.26,458.6 1 1 +codeagent-wrapper/main.go:461.4,461.19 1 1 +codeagent-wrapper/main.go:465.2,468.16 3 1 +codeagent-wrapper/main.go:468.16,471.3 2 1 +codeagent-wrapper/main.go:472.2,475.16 3 1 +codeagent-wrapper/main.go:475.16,478.3 2 1 +codeagent-wrapper/main.go:480.2,492.23 10 1 +codeagent-wrapper/main.go:492.23,495.17 3 1 +codeagent-wrapper/main.go:495.17,498.4 2 1 +codeagent-wrapper/main.go:499.3,500.21 2 1 +codeagent-wrapper/main.go:500.21,503.4 2 1 +codeagent-wrapper/main.go:504.3,504.24 1 1 +codeagent-wrapper/main.go:505.8,507.17 2 1 +codeagent-wrapper/main.go:507.17,510.4 2 1 +codeagent-wrapper/main.go:511.3,512.12 2 1 +codeagent-wrapper/main.go:512.12,514.4 1 1 +codeagent-wrapper/main.go:514.9,516.4 1 1 +codeagent-wrapper/main.go:519.2,522.14 3 1 +codeagent-wrapper/main.go:522.14,524.3 1 1 +codeagent-wrapper/main.go:525.2,534.14 7 1 +codeagent-wrapper/main.go:534.14,536.12 2 1 +codeagent-wrapper/main.go:536.12,538.4 1 1 +codeagent-wrapper/main.go:539.3,539.24 1 1 +codeagent-wrapper/main.go:539.24,541.4 1 1 +codeagent-wrapper/main.go:542.3,542.39 1 1 +codeagent-wrapper/main.go:542.39,544.4 1 1 +codeagent-wrapper/main.go:545.3,545.39 1 1 +codeagent-wrapper/main.go:545.39,547.4 1 0 +codeagent-wrapper/main.go:548.3,548.39 1 1 +codeagent-wrapper/main.go:548.39,550.4 1 0 +codeagent-wrapper/main.go:551.3,551.38 1 1 +codeagent-wrapper/main.go:551.38,553.4 1 0 +codeagent-wrapper/main.go:554.3,554.38 1 1 +codeagent-wrapper/main.go:554.38,556.4 1 0 +codeagent-wrapper/main.go:557.3,557.38 1 1 +codeagent-wrapper/main.go:557.38,559.4 1 0 +codeagent-wrapper/main.go:560.3,560.26 1 1 +codeagent-wrapper/main.go:560.26,562.4 1 0 
+codeagent-wrapper/main.go:563.3,563.23 1 1 +codeagent-wrapper/main.go:563.23,565.4 1 1 +codeagent-wrapper/main.go:568.2,580.26 4 1 +codeagent-wrapper/main.go:580.26,582.3 1 1 +codeagent-wrapper/main.go:584.2,585.28 2 1 +codeagent-wrapper/main.go:585.28,587.3 1 1 +codeagent-wrapper/main.go:589.2,589.10 1 1 +codeagent-wrapper/main.go:592.35,594.20 2 1 +codeagent-wrapper/main.go:594.20,596.3 1 1 +codeagent-wrapper/main.go:598.2,600.33 3 1 +codeagent-wrapper/main.go:600.33,601.29 1 1 +codeagent-wrapper/main.go:601.29,602.24 1 1 +codeagent-wrapper/main.go:602.24,604.5 1 1 +codeagent-wrapper/main.go:605.4,607.12 3 1 +codeagent-wrapper/main.go:609.3,609.39 1 1 +codeagent-wrapper/main.go:612.2,612.24 1 1 +codeagent-wrapper/main.go:612.24,614.3 1 0 +codeagent-wrapper/main.go:615.2,619.25 3 1 +codeagent-wrapper/main.go:619.25,620.20 1 1 +codeagent-wrapper/main.go:620.20,622.4 1 1 +codeagent-wrapper/main.go:623.3,627.20 5 1 +codeagent-wrapper/main.go:627.20,629.4 1 1 +codeagent-wrapper/main.go:630.8,634.20 4 1 +codeagent-wrapper/main.go:634.20,636.4 1 1 +codeagent-wrapper/main.go:639.2,639.17 1 1 +codeagent-wrapper/main.go:642.38,643.18 1 1 +codeagent-wrapper/main.go:643.18,646.3 2 1 +codeagent-wrapper/main.go:647.2,649.16 3 1 +codeagent-wrapper/main.go:649.16,651.3 1 1 +codeagent-wrapper/main.go:652.2,652.20 1 1 +codeagent-wrapper/main.go:652.20,655.3 2 1 +codeagent-wrapper/main.go:656.2,657.26 2 1 +codeagent-wrapper/main.go:660.55,661.11 1 1 +codeagent-wrapper/main.go:661.11,663.3 1 1 +codeagent-wrapper/main.go:664.2,664.25 1 1 +codeagent-wrapper/main.go:664.25,666.3 1 1 +codeagent-wrapper/main.go:667.2,667.59 1 1 +codeagent-wrapper/main.go:670.61,671.26 1 1 +codeagent-wrapper/main.go:671.26,680.3 1 1 +codeagent-wrapper/main.go:681.2,687.3 1 1 +codeagent-wrapper/main.go:695.78,697.2 1 1 +codeagent-wrapper/main.go:699.158,702.2 2 1 +codeagent-wrapper/main.go:704.157,714.20 3 1 +codeagent-wrapper/main.go:714.20,716.3 1 1 +codeagent-wrapper/main.go:717.2,717.23 1 1 
+codeagent-wrapper/main.go:717.23,719.3 1 1 +codeagent-wrapper/main.go:721.2,723.14 3 1 +codeagent-wrapper/main.go:723.14,725.3 1 1 +codeagent-wrapper/main.go:727.2,728.19 2 1 +codeagent-wrapper/main.go:728.19,730.3 1 1 +codeagent-wrapper/main.go:730.8,732.3 1 1 +codeagent-wrapper/main.go:734.2,734.39 1 1 +codeagent-wrapper/main.go:734.39,735.24 1 1 +codeagent-wrapper/main.go:735.24,737.4 1 1 +codeagent-wrapper/main.go:738.3,738.56 1 0 +codeagent-wrapper/main.go:741.2,745.12 4 1 +codeagent-wrapper/main.go:745.12,747.32 1 1 +codeagent-wrapper/main.go:747.32,748.47 1 1 +codeagent-wrapper/main.go:748.47,750.5 1 1 +codeagent-wrapper/main.go:752.3,752.32 1 1 +codeagent-wrapper/main.go:752.32,753.47 1 0 +codeagent-wrapper/main.go:753.47,755.5 1 0 +codeagent-wrapper/main.go:757.3,757.33 1 1 +codeagent-wrapper/main.go:757.33,758.47 1 0 +codeagent-wrapper/main.go:758.47,760.5 1 0 +codeagent-wrapper/main.go:762.8,763.32 1 1 +codeagent-wrapper/main.go:763.32,763.59 1 1 +codeagent-wrapper/main.go:764.3,764.32 1 1 +codeagent-wrapper/main.go:764.32,764.59 1 0 +codeagent-wrapper/main.go:765.3,765.33 1 1 +codeagent-wrapper/main.go:765.33,765.61 1 1 +codeagent-wrapper/main.go:768.2,774.37 5 1 +codeagent-wrapper/main.go:774.37,775.40 1 1 +codeagent-wrapper/main.go:775.40,778.4 2 1 +codeagent-wrapper/main.go:780.2,780.15 1 1 +codeagent-wrapper/main.go:780.15,781.24 1 1 +codeagent-wrapper/main.go:781.24,783.4 1 1 +codeagent-wrapper/main.go:786.2,786.13 1 1 +codeagent-wrapper/main.go:786.13,789.3 2 1 +codeagent-wrapper/main.go:791.2,792.16 2 1 +codeagent-wrapper/main.go:792.16,794.3 1 0 +codeagent-wrapper/main.go:796.2,801.42 5 1 +codeagent-wrapper/main.go:801.42,803.3 1 1 +codeagent-wrapper/main.go:805.2,808.25 3 1 +codeagent-wrapper/main.go:808.25,810.3 1 1 +codeagent-wrapper/main.go:811.2,811.13 1 1 +codeagent-wrapper/main.go:811.13,813.3 1 1 +codeagent-wrapper/main.go:814.2,814.29 1 1 +codeagent-wrapper/main.go:814.29,816.3 1 1 +codeagent-wrapper/main.go:816.8,818.3 1 1 
+codeagent-wrapper/main.go:820.2,822.14 3 1 +codeagent-wrapper/main.go:822.14,824.17 2 1 +codeagent-wrapper/main.go:824.17,829.4 4 1 +codeagent-wrapper/main.go:832.2,833.16 2 1 +codeagent-wrapper/main.go:833.16,838.3 4 1 +codeagent-wrapper/main.go:840.2,841.25 2 1 +codeagent-wrapper/main.go:841.25,843.3 1 1 +codeagent-wrapper/main.go:845.2,847.36 2 1 +codeagent-wrapper/main.go:847.36,848.65 1 1 +codeagent-wrapper/main.go:848.65,854.4 5 1 +codeagent-wrapper/main.go:855.3,858.16 4 1 +codeagent-wrapper/main.go:861.2,862.45 2 1 +codeagent-wrapper/main.go:862.45,864.3 1 1 +codeagent-wrapper/main.go:866.2,866.34 1 1 +codeagent-wrapper/main.go:866.34,868.24 2 1 +codeagent-wrapper/main.go:868.24,871.4 2 1 +codeagent-wrapper/main.go:872.3,872.28 1 1 +codeagent-wrapper/main.go:875.2,876.12 2 1 +codeagent-wrapper/main.go:876.12,876.36 1 1 +codeagent-wrapper/main.go:878.2,879.12 2 1 +codeagent-wrapper/main.go:879.12,882.3 2 1 +codeagent-wrapper/main.go:884.2,887.9 3 1 +codeagent-wrapper/main.go:888.26,888.26 0 1 +codeagent-wrapper/main.go:889.20,892.21 3 1 +codeagent-wrapper/main.go:895.2,895.27 1 1 +codeagent-wrapper/main.go:895.27,897.3 1 1 +codeagent-wrapper/main.go:899.2,901.40 2 1 +codeagent-wrapper/main.go:901.40,902.50 1 1 +codeagent-wrapper/main.go:902.50,906.4 3 1 +codeagent-wrapper/main.go:907.3,909.16 3 1 +codeagent-wrapper/main.go:912.2,912.20 1 1 +codeagent-wrapper/main.go:912.20,913.51 1 1 +codeagent-wrapper/main.go:913.51,919.4 5 1 +codeagent-wrapper/main.go:920.3,923.16 4 0 +codeagent-wrapper/main.go:926.2,928.19 3 1 +codeagent-wrapper/main.go:928.19,933.3 4 1 +codeagent-wrapper/main.go:935.2,935.25 1 1 +codeagent-wrapper/main.go:935.25,937.3 1 1 +codeagent-wrapper/main.go:938.2,938.25 1 1 +codeagent-wrapper/main.go:938.25,940.3 1 1 +codeagent-wrapper/main.go:942.2,946.15 4 1 +codeagent-wrapper/main.go:954.51,955.18 1 1 +codeagent-wrapper/main.go:955.18,957.3 1 1 +codeagent-wrapper/main.go:959.2,959.23 1 1 +codeagent-wrapper/main.go:959.23,962.3 2 1 
+codeagent-wrapper/main.go:964.2,965.22 2 1 +codeagent-wrapper/main.go:965.22,968.3 2 1 +codeagent-wrapper/main.go:970.2,972.20 3 1 +codeagent-wrapper/main.go:975.38,977.2 1 1 +codeagent-wrapper/main.go:979.82,983.12 3 1 +codeagent-wrapper/main.go:983.12,985.10 2 1 +codeagent-wrapper/main.go:986.23,988.26 2 0 +codeagent-wrapper/main.go:988.26,990.70 2 0 +codeagent-wrapper/main.go:990.70,991.28 1 0 +codeagent-wrapper/main.go:991.28,993.7 1 0 +codeagent-wrapper/main.go:996.21,996.21 0 1 +codeagent-wrapper/main.go:1001.47,1002.16 1 1 +codeagent-wrapper/main.go:1002.16,1004.3 1 0 +codeagent-wrapper/main.go:1006.2,1006.52 1 1 +codeagent-wrapper/main.go:1006.52,1008.3 1 1 +codeagent-wrapper/main.go:1010.2,1010.57 1 1 +codeagent-wrapper/main.go:1013.50,1014.38 1 1 +codeagent-wrapper/main.go:1014.38,1016.3 1 0 +codeagent-wrapper/main.go:1018.2,1020.74 2 1 +codeagent-wrapper/main.go:1020.74,1021.25 1 1 +codeagent-wrapper/main.go:1021.25,1023.4 1 1 +codeagent-wrapper/main.go:1027.62,1029.2 1 1 +codeagent-wrapper/main.go:1031.91,1033.2 1 1 +codeagent-wrapper/main.go:1035.111,1039.19 3 1 +codeagent-wrapper/main.go:1039.19,1040.25 1 0 +codeagent-wrapper/main.go:1040.26,1040.27 0 0 +codeagent-wrapper/main.go:1042.2,1042.19 1 1 +codeagent-wrapper/main.go:1042.19,1043.25 1 0 +codeagent-wrapper/main.go:1043.26,1043.27 0 0 +codeagent-wrapper/main.go:1046.2,1048.21 2 1 +codeagent-wrapper/main.go:1048.21,1050.17 2 1 +codeagent-wrapper/main.go:1050.17,1051.12 1 0 +codeagent-wrapper/main.go:1053.3,1056.62 3 1 +codeagent-wrapper/main.go:1056.62,1058.12 2 1 +codeagent-wrapper/main.go:1061.3,1062.27 2 1 +codeagent-wrapper/main.go:1062.27,1064.4 1 1 +codeagent-wrapper/main.go:1065.3,1065.49 1 1 +codeagent-wrapper/main.go:1065.49,1067.4 1 1 +codeagent-wrapper/main.go:1068.3,1068.23 1 1 +codeagent-wrapper/main.go:1068.23,1070.4 1 1 +codeagent-wrapper/main.go:1070.9,1072.4 1 1 +codeagent-wrapper/main.go:1074.3,1074.21 1 1 +codeagent-wrapper/main.go:1075.25,1077.70 2 1 
+codeagent-wrapper/main.go:1078.25,1081.25 3 1 +codeagent-wrapper/main.go:1081.25,1084.5 2 1 +codeagent-wrapper/main.go:1085.4,1086.83 2 1 +codeagent-wrapper/main.go:1086.83,1088.5 1 1 +codeagent-wrapper/main.go:1092.2,1092.65 1 1 +codeagent-wrapper/main.go:1092.65,1094.3 1 0 +codeagent-wrapper/main.go:1096.2,1097.26 2 1 +codeagent-wrapper/main.go:1100.93,1103.20 2 1 +codeagent-wrapper/main.go:1103.20,1104.44 1 0 +codeagent-wrapper/main.go:1104.44,1106.4 1 0 +codeagent-wrapper/main.go:1109.2,1114.19 5 1 +codeagent-wrapper/main.go:1114.19,1116.3 1 1 +codeagent-wrapper/main.go:1118.2,1119.25 2 1 +codeagent-wrapper/main.go:1119.25,1121.3 1 1 +codeagent-wrapper/main.go:1123.2,1123.81 1 0 +codeagent-wrapper/main.go:1126.45,1127.26 1 1 +codeagent-wrapper/main.go:1128.14,1129.11 1 1 +codeagent-wrapper/main.go:1130.21,1132.26 2 1 +codeagent-wrapper/main.go:1132.26,1133.34 1 1 +codeagent-wrapper/main.go:1133.34,1135.5 1 1 +codeagent-wrapper/main.go:1137.3,1137.21 1 1 +codeagent-wrapper/main.go:1138.10,1139.12 1 1 +codeagent-wrapper/main.go:1143.27,1145.15 2 1 +codeagent-wrapper/main.go:1145.15,1147.3 1 1 +codeagent-wrapper/main.go:1149.2,1150.31 2 1 +codeagent-wrapper/main.go:1150.31,1153.3 2 1 +codeagent-wrapper/main.go:1155.2,1155.20 1 1 +codeagent-wrapper/main.go:1155.20,1157.3 1 1 +codeagent-wrapper/main.go:1158.2,1158.15 1 1 +codeagent-wrapper/main.go:1161.31,1163.16 2 1 +codeagent-wrapper/main.go:1163.16,1165.3 1 0 +codeagent-wrapper/main.go:1166.2,1166.45 1 1 +codeagent-wrapper/main.go:1169.24,1171.2 1 1 +codeagent-wrapper/main.go:1173.46,1174.38 1 1 +codeagent-wrapper/main.go:1174.38,1176.3 1 1 +codeagent-wrapper/main.go:1177.2,1177.21 1 1 +codeagent-wrapper/main.go:1186.57,1187.17 1 1 +codeagent-wrapper/main.go:1187.17,1189.3 1 1 +codeagent-wrapper/main.go:1190.2,1190.51 1 1 +codeagent-wrapper/main.go:1193.51,1194.15 1 1 +codeagent-wrapper/main.go:1194.15,1196.3 1 0 +codeagent-wrapper/main.go:1197.2,1198.17 2 1 +codeagent-wrapper/main.go:1198.17,1199.48 1 1 
+codeagent-wrapper/main.go:1199.48,1203.12 4 1 +codeagent-wrapper/main.go:1205.3,1206.8 2 1 +codeagent-wrapper/main.go:1208.2,1208.19 1 1 +codeagent-wrapper/main.go:1211.30,1212.36 1 1 +codeagent-wrapper/main.go:1212.36,1214.3 1 1 +codeagent-wrapper/main.go:1215.2,1215.19 1 1 +codeagent-wrapper/main.go:1218.42,1219.15 1 1 +codeagent-wrapper/main.go:1219.15,1221.3 1 0 +codeagent-wrapper/main.go:1222.2,1224.26 3 1 +codeagent-wrapper/main.go:1224.26,1226.3 1 1 +codeagent-wrapper/main.go:1227.2,1227.44 1 1 +codeagent-wrapper/main.go:1227.44,1229.17 2 1 +codeagent-wrapper/main.go:1229.17,1231.4 1 0 +codeagent-wrapper/main.go:1231.9,1233.4 1 1 +codeagent-wrapper/main.go:1235.2,1235.27 1 1 +codeagent-wrapper/main.go:1238.44,1239.22 1 1 +codeagent-wrapper/main.go:1239.22,1241.3 1 1 +codeagent-wrapper/main.go:1242.2,1242.16 1 1 +codeagent-wrapper/main.go:1242.16,1244.3 1 0 +codeagent-wrapper/main.go:1245.2,1245.27 1 1 +codeagent-wrapper/main.go:1248.24,1249.11 1 1 +codeagent-wrapper/main.go:1249.11,1251.3 1 1 +codeagent-wrapper/main.go:1252.2,1252.10 1 1 +codeagent-wrapper/main.go:1255.27,1257.2 1 1 +codeagent-wrapper/main.go:1259.26,1261.19 2 1 +codeagent-wrapper/main.go:1261.19,1263.3 1 1 +codeagent-wrapper/main.go:1264.2,1264.23 1 1 +codeagent-wrapper/main.go:1267.29,1269.2 1 1 +codeagent-wrapper/main.go:1271.21,1273.2 1 1 +codeagent-wrapper/main.go:1275.32,1277.2 1 1 +codeagent-wrapper/main.go:1279.35,1281.2 1 1 +codeagent-wrapper/main.go:1283.26,1284.45 1 1 +codeagent-wrapper/main.go:1284.45,1286.3 1 1 +codeagent-wrapper/main.go:1289.26,1290.45 1 1 +codeagent-wrapper/main.go:1290.45,1292.3 1 1 +codeagent-wrapper/main.go:1295.27,1296.45 1 1 +codeagent-wrapper/main.go:1296.45,1298.3 1 1 +codeagent-wrapper/main.go:1301.23,1302.45 1 1 +codeagent-wrapper/main.go:1302.45,1304.3 1 1 +codeagent-wrapper/main.go:1305.2,1305.24 1 1 +codeagent-wrapper/main.go:1305.24,1307.3 1 1 +codeagent-wrapper/main.go:1310.18,1339.2 2 1 diff --git a/codeagent-wrapper/executor.go 
b/codeagent-wrapper/executor.go new file mode 100644 index 0000000..cc7e4ca --- /dev/null +++ b/codeagent-wrapper/executor.go @@ -0,0 +1,528 @@ +package main + +import ( + "context" + "errors" + "fmt" + "io" + "os" + "os/exec" + "os/signal" + "sort" + "strings" + "sync" + "syscall" + "time" +) + +type parseResult struct { + message string + threadID string +} + +var runCodexTaskFn = func(task TaskSpec, timeout int) TaskResult { + if task.WorkDir == "" { + task.WorkDir = defaultWorkdir + } + if task.Mode == "" { + task.Mode = "new" + } + if task.UseStdin || shouldUseStdin(task.Task, false) { + task.UseStdin = true + } + + return runCodexTask(task, true, timeout) +} + +func topologicalSort(tasks []TaskSpec) ([][]TaskSpec, error) { + idToTask := make(map[string]TaskSpec, len(tasks)) + indegree := make(map[string]int, len(tasks)) + adj := make(map[string][]string, len(tasks)) + + for _, task := range tasks { + idToTask[task.ID] = task + indegree[task.ID] = 0 + } + + for _, task := range tasks { + for _, dep := range task.Dependencies { + if _, ok := idToTask[dep]; !ok { + return nil, fmt.Errorf("dependency %q not found for task %q", dep, task.ID) + } + indegree[task.ID]++ + adj[dep] = append(adj[dep], task.ID) + } + } + + queue := make([]string, 0, len(tasks)) + for _, task := range tasks { + if indegree[task.ID] == 0 { + queue = append(queue, task.ID) + } + } + + layers := make([][]TaskSpec, 0) + processed := 0 + + for len(queue) > 0 { + current := queue + queue = nil + layer := make([]TaskSpec, len(current)) + for i, id := range current { + layer[i] = idToTask[id] + processed++ + } + layers = append(layers, layer) + + next := make([]string, 0) + for _, id := range current { + for _, neighbor := range adj[id] { + indegree[neighbor]-- + if indegree[neighbor] == 0 { + next = append(next, neighbor) + } + } + } + queue = append(queue, next...) 
+ } + + if processed != len(tasks) { + cycleIDs := make([]string, 0) + for id, deg := range indegree { + if deg > 0 { + cycleIDs = append(cycleIDs, id) + } + } + sort.Strings(cycleIDs) + return nil, fmt.Errorf("cycle detected involving tasks: %s", strings.Join(cycleIDs, ",")) + } + + return layers, nil +} + +func executeConcurrent(layers [][]TaskSpec, timeout int) []TaskResult { + totalTasks := 0 + for _, layer := range layers { + totalTasks += len(layer) + } + + results := make([]TaskResult, 0, totalTasks) + failed := make(map[string]TaskResult, totalTasks) + resultsCh := make(chan TaskResult, totalTasks) + + for _, layer := range layers { + var wg sync.WaitGroup + executed := 0 + + for _, task := range layer { + if skip, reason := shouldSkipTask(task, failed); skip { + res := TaskResult{TaskID: task.ID, ExitCode: 1, Error: reason} + results = append(results, res) + failed[task.ID] = res + continue + } + + executed++ + wg.Add(1) + go func(ts TaskSpec) { + defer wg.Done() + defer func() { + if r := recover(); r != nil { + resultsCh <- TaskResult{TaskID: ts.ID, ExitCode: 1, Error: fmt.Sprintf("panic: %v", r)} + } + }() + resultsCh <- runCodexTaskFn(ts, timeout) + }(task) + } + + wg.Wait() + + for i := 0; i < executed; i++ { + res := <-resultsCh + results = append(results, res) + if res.ExitCode != 0 || res.Error != "" { + failed[res.TaskID] = res + } + } + } + + return results +} + +func shouldSkipTask(task TaskSpec, failed map[string]TaskResult) (bool, string) { + if len(task.Dependencies) == 0 { + return false, "" + } + + var blocked []string + for _, dep := range task.Dependencies { + if _, ok := failed[dep]; ok { + blocked = append(blocked, dep) + } + } + + if len(blocked) == 0 { + return false, "" + } + + return true, fmt.Sprintf("skipped due to failed dependencies: %s", strings.Join(blocked, ",")) +} + +func generateFinalOutput(results []TaskResult) string { + var sb strings.Builder + + success := 0 + failed := 0 + for _, res := range results { + if 
res.ExitCode == 0 && res.Error == "" { + success++ + } else { + failed++ + } + } + + sb.WriteString(fmt.Sprintf("=== Parallel Execution Summary ===\n")) + sb.WriteString(fmt.Sprintf("Total: %d | Success: %d | Failed: %d\n\n", len(results), success, failed)) + + for _, res := range results { + sb.WriteString(fmt.Sprintf("--- Task: %s ---\n", res.TaskID)) + if res.Error != "" { + sb.WriteString(fmt.Sprintf("Status: FAILED (exit code %d)\nError: %s\n", res.ExitCode, res.Error)) + } else if res.ExitCode != 0 { + sb.WriteString(fmt.Sprintf("Status: FAILED (exit code %d)\n", res.ExitCode)) + } else { + sb.WriteString("Status: SUCCESS\n") + } + if res.SessionID != "" { + sb.WriteString(fmt.Sprintf("Session: %s\n", res.SessionID)) + } + if res.Message != "" { + sb.WriteString(fmt.Sprintf("\n%s\n", res.Message)) + } + sb.WriteString("\n") + } + + return sb.String() +} + +func buildCodexArgs(cfg *Config, targetArg string) []string { + if cfg.Mode == "resume" { + return []string{ + "e", + "--skip-git-repo-check", + "--json", + "resume", + cfg.SessionID, + targetArg, + } + } + return []string{ + "e", + "--skip-git-repo-check", + "-C", cfg.WorkDir, + "--json", + targetArg, + } +} + +func runCodexTask(taskSpec TaskSpec, silent bool, timeoutSec int) TaskResult { + return runCodexTaskWithContext(context.Background(), taskSpec, nil, false, silent, timeoutSec) +} + +func runCodexProcess(parentCtx context.Context, codexArgs []string, taskText string, useStdin bool, timeoutSec int) (message, threadID string, exitCode int) { + res := runCodexTaskWithContext(parentCtx, TaskSpec{Task: taskText, WorkDir: defaultWorkdir, Mode: "new", UseStdin: useStdin}, codexArgs, true, false, timeoutSec) + return res.Message, res.SessionID, res.ExitCode +} + +func runCodexTaskWithContext(parentCtx context.Context, taskSpec TaskSpec, customArgs []string, useCustomArgs bool, silent bool, timeoutSec int) TaskResult { + result := TaskResult{TaskID: taskSpec.ID} + + cfg := &Config{ + Mode: taskSpec.Mode, + 
Task: taskSpec.Task, + SessionID: taskSpec.SessionID, + WorkDir: taskSpec.WorkDir, + Backend: defaultBackendName, + } + if cfg.Mode == "" { + cfg.Mode = "new" + } + if cfg.WorkDir == "" { + cfg.WorkDir = defaultWorkdir + } + + useStdin := taskSpec.UseStdin + targetArg := taskSpec.Task + if useStdin { + targetArg = "-" + } + + var codexArgs []string + if useCustomArgs { + codexArgs = customArgs + } else { + codexArgs = buildCodexArgsFn(cfg, targetArg) + } + + prefixMsg := func(msg string) string { + if taskSpec.ID == "" { + return msg + } + return fmt.Sprintf("[Task: %s] %s", taskSpec.ID, msg) + } + + var logInfoFn func(string) + var logWarnFn func(string) + var logErrorFn func(string) + + if silent { + // Silent mode: only persist to file when available; avoid stderr noise. + logInfoFn = func(msg string) { + if logger := activeLogger(); logger != nil { + logger.Info(prefixMsg(msg)) + } + } + logWarnFn = func(msg string) { + if logger := activeLogger(); logger != nil { + logger.Warn(prefixMsg(msg)) + } + } + logErrorFn = func(msg string) { + if logger := activeLogger(); logger != nil { + logger.Error(prefixMsg(msg)) + } + } + } else { + logInfoFn = func(msg string) { logInfo(prefixMsg(msg)) } + logWarnFn = func(msg string) { logWarn(prefixMsg(msg)) } + logErrorFn = func(msg string) { logError(prefixMsg(msg)) } + } + + stderrBuf := &tailBuffer{limit: stderrCaptureLimit} + + var stdoutLogger *logWriter + var stderrLogger *logWriter + + var tempLogger *Logger + if silent && activeLogger() == nil { + if l, err := NewLogger(); err == nil { + setLogger(l) + tempLogger = l + } + } + defer func() { + if tempLogger != nil { + _ = closeLogger() + } + }() + + if !silent { + stdoutLogger = newLogWriter("CODEX_STDOUT: ", codexLogLineLimit) + stderrLogger = newLogWriter("CODEX_STDERR: ", codexLogLineLimit) + } + + ctx := parentCtx + if ctx == nil { + ctx = context.Background() + } + + ctx, cancel := context.WithTimeout(ctx, time.Duration(timeoutSec)*time.Second) + defer cancel() 
+ ctx, stop := signal.NotifyContext(ctx, syscall.SIGINT, syscall.SIGTERM) + defer stop() + + attachStderr := func(msg string) string { + return fmt.Sprintf("%s; stderr: %s", msg, stderrBuf.String()) + } + + cmd := commandContext(ctx, codexCommand, codexArgs...) + + stderrWriters := []io.Writer{stderrBuf} + if stderrLogger != nil { + stderrWriters = append(stderrWriters, stderrLogger) + } + if !silent { + stderrWriters = append([]io.Writer{os.Stderr}, stderrWriters...) + } + if len(stderrWriters) == 1 { + cmd.Stderr = stderrWriters[0] + } else { + cmd.Stderr = io.MultiWriter(stderrWriters...) + } + + var stdinPipe io.WriteCloser + var err error + if useStdin { + stdinPipe, err = cmd.StdinPipe() + if err != nil { + logErrorFn("Failed to create stdin pipe: " + err.Error()) + result.ExitCode = 1 + result.Error = attachStderr("failed to create stdin pipe: " + err.Error()) + return result + } + } + + stdout, err := cmd.StdoutPipe() + if err != nil { + logErrorFn("Failed to create stdout pipe: " + err.Error()) + result.ExitCode = 1 + result.Error = attachStderr("failed to create stdout pipe: " + err.Error()) + return result + } + + stdoutReader := io.Reader(stdout) + if stdoutLogger != nil { + stdoutReader = io.TeeReader(stdout, stdoutLogger) + } + + logInfoFn(fmt.Sprintf("Starting %s with args: %s %s...", codexCommand, codexCommand, strings.Join(codexArgs[:min(5, len(codexArgs))], " "))) + + if err := cmd.Start(); err != nil { + if strings.Contains(err.Error(), "executable file not found") { + msg := fmt.Sprintf("%s command not found in PATH", codexCommand) + logErrorFn(msg) + result.ExitCode = 127 + result.Error = attachStderr(msg) + return result + } + logErrorFn("Failed to start " + codexCommand + ": " + err.Error()) + result.ExitCode = 1 + result.Error = attachStderr("failed to start " + codexCommand + ": " + err.Error()) + return result + } + + logInfoFn(fmt.Sprintf("Starting %s with PID: %d", codexCommand, cmd.Process.Pid)) + if logger := activeLogger(); logger != 
nil { + logInfoFn(fmt.Sprintf("Log capturing to: %s", logger.Path())) + } + + if useStdin && stdinPipe != nil { + logInfoFn(fmt.Sprintf("Writing %d chars to stdin...", len(taskSpec.Task))) + go func(data string) { + defer stdinPipe.Close() + _, _ = io.WriteString(stdinPipe, data) + }(taskSpec.Task) + logInfoFn("Stdin closed") + } + + waitCh := make(chan error, 1) + go func() { waitCh <- cmd.Wait() }() + + parseCh := make(chan parseResult, 1) + go func() { + msg, tid := parseJSONStreamWithLog(stdoutReader, logWarnFn, logInfoFn) + parseCh <- parseResult{message: msg, threadID: tid} + }() + + var waitErr error + var forceKillTimer *time.Timer + + select { + case waitErr = <-waitCh: + case <-ctx.Done(): + logErrorFn(cancelReason(ctx)) + forceKillTimer = terminateProcess(cmd) + waitErr = <-waitCh + } + + if forceKillTimer != nil { + forceKillTimer.Stop() + } + + parsed := <-parseCh + + if ctxErr := ctx.Err(); ctxErr != nil { + if errors.Is(ctxErr, context.DeadlineExceeded) { + result.ExitCode = 124 + result.Error = attachStderr(fmt.Sprintf("%s execution timeout", codexCommand)) + return result + } + result.ExitCode = 130 + result.Error = attachStderr("execution cancelled") + return result + } + + if waitErr != nil { + if exitErr, ok := waitErr.(*exec.ExitError); ok { + code := exitErr.ExitCode() + logErrorFn(fmt.Sprintf("%s exited with status %d", codexCommand, code)) + result.ExitCode = code + result.Error = attachStderr(fmt.Sprintf("%s exited with status %d", codexCommand, code)) + return result + } + logErrorFn(codexCommand + " error: " + waitErr.Error()) + result.ExitCode = 1 + result.Error = attachStderr(codexCommand + " error: " + waitErr.Error()) + return result + } + + message := parsed.message + threadID := parsed.threadID + if message == "" { + logErrorFn(fmt.Sprintf("%s completed without agent_message output", codexCommand)) + result.ExitCode = 1 + result.Error = attachStderr(fmt.Sprintf("%s completed without agent_message output", codexCommand)) + return 
result + } + + if stdoutLogger != nil { + stdoutLogger.Flush() + } + if stderrLogger != nil { + stderrLogger.Flush() + } + + result.ExitCode = 0 + result.Message = message + result.SessionID = threadID + + return result +} + +func forwardSignals(ctx context.Context, cmd *exec.Cmd, logErrorFn func(string)) { + sigCh := make(chan os.Signal, 1) + signal.Notify(sigCh, syscall.SIGINT, syscall.SIGTERM) + + go func() { + defer signal.Stop(sigCh) + select { + case sig := <-sigCh: + logErrorFn(fmt.Sprintf("Received signal: %v", sig)) + if cmd.Process != nil { + _ = cmd.Process.Signal(syscall.SIGTERM) + time.AfterFunc(time.Duration(forceKillDelay)*time.Second, func() { + if cmd.Process != nil { + _ = cmd.Process.Kill() + } + }) + } + case <-ctx.Done(): + } + }() +} + +func cancelReason(ctx context.Context) string { + if ctx == nil { + return "Context cancelled" + } + + if errors.Is(ctx.Err(), context.DeadlineExceeded) { + return fmt.Sprintf("%s execution timeout", codexCommand) + } + + return "Execution cancelled, terminating codex process" +} + +func terminateProcess(cmd *exec.Cmd) *time.Timer { + if cmd == nil || cmd.Process == nil { + return nil + } + + _ = cmd.Process.Signal(syscall.SIGTERM) + + return time.AfterFunc(time.Duration(forceKillDelay)*time.Second, func() { + if cmd.Process != nil { + _ = cmd.Process.Kill() + } + }) +} diff --git a/codeagent-wrapper/go.mod b/codeagent-wrapper/go.mod new file mode 100644 index 0000000..ae7aa47 --- /dev/null +++ b/codeagent-wrapper/go.mod @@ -0,0 +1,3 @@ +module codeagent-wrapper + +go 1.21 diff --git a/codex-wrapper/logger.go b/codeagent-wrapper/logger.go similarity index 89% rename from codex-wrapper/logger.go rename to codeagent-wrapper/logger.go index bba546b..b187caa 100644 --- a/codex-wrapper/logger.go +++ b/codeagent-wrapper/logger.go @@ -41,15 +41,15 @@ func NewLogger() (*Logger, error) { // NewLoggerWithSuffix creates a logger with an optional suffix in the filename. 
// Useful for tests that need isolated log files within the same process. func NewLoggerWithSuffix(suffix string) (*Logger, error) { - filename := fmt.Sprintf("codex-wrapper-%d", os.Getpid()) + filename := fmt.Sprintf("codeagent-wrapper-%d", os.Getpid()) if suffix != "" { filename += "-" + suffix } filename += ".log" - path := filepath.Join(os.TempDir(), filename) + path := filepath.Clean(filepath.Join(os.TempDir(), filename)) - f, err := os.OpenFile(path, os.O_CREATE|os.O_WRONLY|os.O_APPEND, 0o644) + f, err := os.OpenFile(path, os.O_CREATE|os.O_WRONLY|os.O_APPEND, 0o600) if err != nil { return nil, err } @@ -133,7 +133,7 @@ func (l *Logger) Close() error { } // Log file is kept for debugging - NOT removed - // Users can manually clean up /tmp/codex-wrapper-*.log files + // Users can manually clean up /tmp/codeagent-wrapper-*.log files }) return closeErr @@ -218,25 +218,25 @@ func (l *Logger) run() { defer ticker.Stop() for { - select { - case entry, ok := <-l.ch: - if !ok { - // Channel closed, final flush - l.writer.Flush() - return - } - timestamp := time.Now().Format("2006-01-02 15:04:05.000") - pid := os.Getpid() - fmt.Fprintf(l.writer, "[%s] [PID:%d] %s: %s\n", timestamp, pid, entry.level, entry.msg) + select { + case entry, ok := <-l.ch: + if !ok { + // Channel closed, final flush + _ = l.writer.Flush() + return + } + timestamp := time.Now().Format("2006-01-02 15:04:05.000") + pid := os.Getpid() + fmt.Fprintf(l.writer, "[%s] [PID:%d] %s: %s\n", timestamp, pid, entry.level, entry.msg) l.pendingWG.Done() case <-ticker.C: - l.writer.Flush() + _ = l.writer.Flush() case flushDone := <-l.flushReq: // Explicit flush request - flush writer and sync to disk - l.writer.Flush() - l.file.Sync() + _ = l.writer.Flush() + _ = l.file.Sync() close(flushDone) } } diff --git a/codex-wrapper/logger_test.go b/codeagent-wrapper/logger_test.go similarity index 97% rename from codex-wrapper/logger_test.go rename to codeagent-wrapper/logger_test.go index 6d2b8bb..213a6b0 100644 --- 
a/codex-wrapper/logger_test.go +++ b/codeagent-wrapper/logger_test.go @@ -22,7 +22,7 @@ func TestLoggerCreatesFileWithPID(t *testing.T) { } defer logger.Close() - expectedPath := filepath.Join(tempDir, fmt.Sprintf("codex-wrapper-%d.log", os.Getpid())) + expectedPath := filepath.Join(tempDir, fmt.Sprintf("codeagent-wrapper-%d.log", os.Getpid())) if logger.Path() != expectedPath { t.Fatalf("logger path = %s, want %s", logger.Path(), expectedPath) } diff --git a/codeagent-wrapper/main.go b/codeagent-wrapper/main.go new file mode 100644 index 0000000..18c72ed --- /dev/null +++ b/codeagent-wrapper/main.go @@ -0,0 +1,327 @@ +package main + +import ( + "encoding/json" + "fmt" + "io" + "os" + "os/exec" + "strings" + "sync/atomic" +) + +const ( + version = "5.0.0" + defaultWorkdir = "." + defaultTimeout = 7200 // seconds + codexLogLineLimit = 1000 + stdinSpecialChars = "\n\\\"'`$" + stderrCaptureLimit = 4 * 1024 + defaultBackendName = "codex" + wrapperName = "codeagent-wrapper" +) + +// Test hooks for dependency injection +var ( + stdinReader io.Reader = os.Stdin + isTerminalFn = defaultIsTerminal + codexCommand = "codex" + cleanupHook func() + loggerPtr atomic.Pointer[Logger] + + buildCodexArgsFn = buildCodexArgs + selectBackendFn = selectBackend + commandContext = exec.CommandContext + jsonMarshal = json.Marshal + forceKillDelay = 5 // seconds - made variable for testability +) + +func main() { + exitCode := run() + os.Exit(exitCode) +} + +// run is the main logic, returns exit code for testability +func run() (exitCode int) { + // Handle --version and --help first (no logger needed) + if len(os.Args) > 1 { + switch os.Args[1] { + case "--version", "-v": + fmt.Printf("%s version %s\n", wrapperName, version) + return 0 + case "--help", "-h": + printHelp() + return 0 + } + } + + // Initialize logger for all other commands + logger, err := NewLogger() + if err != nil { + fmt.Fprintf(os.Stderr, "ERROR: failed to initialize logger: %v\n", err) + return 1 + } + 
setLogger(logger) + + defer func() { + logger := activeLogger() + if logger != nil { + logger.Flush() + } + if err := closeLogger(); err != nil { + fmt.Fprintf(os.Stderr, "ERROR: failed to close logger: %v\n", err) + } + // Always remove log file after completion + if logger != nil { + if err := logger.RemoveLogFile(); err != nil && !os.IsNotExist(err) { + // Silently ignore removal errors + } + } + }() + defer runCleanupHook() + + // Handle remaining commands + if len(os.Args) > 1 { + switch os.Args[1] { + case "--parallel": + if len(os.Args) > 2 { + fmt.Fprintln(os.Stderr, "ERROR: --parallel reads its task configuration from stdin and does not accept additional arguments.") + fmt.Fprintln(os.Stderr, "Usage examples:") + fmt.Fprintf(os.Stderr, " %s --parallel < tasks.txt\n", wrapperName) + fmt.Fprintf(os.Stderr, " echo '...' | %s --parallel\n", wrapperName) + fmt.Fprintf(os.Stderr, " %s --parallel <<'EOF'\n", wrapperName) + return 1 + } + data, err := io.ReadAll(stdinReader) + if err != nil { + fmt.Fprintf(os.Stderr, "ERROR: failed to read stdin: %v\n", err) + return 1 + } + + cfg, err := parseParallelConfig(data) + if err != nil { + fmt.Fprintf(os.Stderr, "ERROR: %v\n", err) + return 1 + } + + timeoutSec := resolveTimeout() + layers, err := topologicalSort(cfg.Tasks) + if err != nil { + fmt.Fprintf(os.Stderr, "ERROR: %v\n", err) + return 1 + } + + results := executeConcurrent(layers, timeoutSec) + fmt.Println(generateFinalOutput(results)) + + exitCode = 0 + for _, res := range results { + if res.ExitCode != 0 { + exitCode = res.ExitCode + } + } + + return exitCode + } + } + + logInfo("Script started") + + cfg, err := parseArgs() + if err != nil { + logError(err.Error()) + return 1 + } + logInfo(fmt.Sprintf("Parsed args: mode=%s, task_len=%d, backend=%s", cfg.Mode, len(cfg.Task), cfg.Backend)) + + backend, err := selectBackendFn(cfg.Backend) + if err != nil { + logError(err.Error()) + return 1 + } + // Wire selected backend into runtime hooks for the rest of the 
execution. + codexCommand = backend.Command() + buildCodexArgsFn = backend.BuildArgs + cfg.Backend = backend.Name() + logInfo(fmt.Sprintf("Selected backend: %s", backend.Name())) + + timeoutSec := resolveTimeout() + logInfo(fmt.Sprintf("Timeout: %ds", timeoutSec)) + cfg.Timeout = timeoutSec + + var taskText string + var piped bool + + if cfg.ExplicitStdin { + logInfo("Explicit stdin mode: reading task from stdin") + data, err := io.ReadAll(stdinReader) + if err != nil { + logError("Failed to read stdin: " + err.Error()) + return 1 + } + taskText = string(data) + if taskText == "" { + logError("Explicit stdin mode requires task input from stdin") + return 1 + } + piped = !isTerminal() + } else { + pipedTask, err := readPipedTask() + if err != nil { + logError("Failed to read piped stdin: " + err.Error()) + return 1 + } + piped = pipedTask != "" + if piped { + taskText = pipedTask + } else { + taskText = cfg.Task + } + } + + useStdin := cfg.ExplicitStdin || shouldUseStdin(taskText, piped) + + targetArg := taskText + if useStdin { + targetArg = "-" + } + codexArgs := buildCodexArgsFn(cfg, targetArg) + + // Print startup information to stderr + fmt.Fprintf(os.Stderr, "[%s]\n", wrapperName) + fmt.Fprintf(os.Stderr, " Backend: %s\n", cfg.Backend) + fmt.Fprintf(os.Stderr, " Command: %s %s\n", codexCommand, strings.Join(codexArgs, " ")) + fmt.Fprintf(os.Stderr, " PID: %d\n", os.Getpid()) + fmt.Fprintf(os.Stderr, " Log: %s\n", logger.Path()) + + if useStdin { + var reasons []string + if piped { + reasons = append(reasons, "piped input") + } + if cfg.ExplicitStdin { + reasons = append(reasons, "explicit \"-\"") + } + if strings.Contains(taskText, "\n") { + reasons = append(reasons, "newline") + } + if strings.Contains(taskText, "\\") { + reasons = append(reasons, "backslash") + } + if strings.Contains(taskText, "\"") { + reasons = append(reasons, "double-quote") + } + if strings.Contains(taskText, "'") { + reasons = append(reasons, "single-quote") + } + if 
strings.Contains(taskText, "`") { + reasons = append(reasons, "backtick") + } + if strings.Contains(taskText, "$") { + reasons = append(reasons, "dollar") + } + if len(taskText) > 800 { + reasons = append(reasons, "length>800") + } + if len(reasons) > 0 { + logWarn(fmt.Sprintf("Using stdin mode for task due to: %s", strings.Join(reasons, ", "))) + } + } + + logInfo(fmt.Sprintf("%s running...", cfg.Backend)) + + taskSpec := TaskSpec{ + Task: taskText, + WorkDir: cfg.WorkDir, + Mode: cfg.Mode, + SessionID: cfg.SessionID, + UseStdin: useStdin, + } + + result := runCodexTask(taskSpec, false, cfg.Timeout) + + if result.ExitCode != 0 { + return result.ExitCode + } + + fmt.Println(result.Message) + if result.SessionID != "" { + fmt.Printf("\n---\nSESSION_ID: %s\n", result.SessionID) + } + + return 0 +} + +func setLogger(l *Logger) { + loggerPtr.Store(l) +} + +func closeLogger() error { + logger := loggerPtr.Swap(nil) + if logger == nil { + return nil + } + return logger.Close() +} + +func activeLogger() *Logger { + return loggerPtr.Load() +} + +func logInfo(msg string) { + if logger := activeLogger(); logger != nil { + logger.Info(msg) + } +} + +func logWarn(msg string) { + if logger := activeLogger(); logger != nil { + logger.Warn(msg) + } +} + +func logError(msg string) { + if logger := activeLogger(); logger != nil { + logger.Error(msg) + } +} + +func runCleanupHook() { + if logger := activeLogger(); logger != nil { + logger.Flush() + } + if cleanupHook != nil { + cleanupHook() + } +} + +func printHelp() { + help := `codeagent-wrapper - Go wrapper for AI CLI backends + +Usage: + codeagent-wrapper "task" [workdir] + codeagent-wrapper --backend claude "task" [workdir] + codeagent-wrapper - [workdir] Read task from stdin + codeagent-wrapper resume "task" [workdir] + codeagent-wrapper resume - [workdir] + codeagent-wrapper --parallel Run tasks in parallel (config from stdin) + codeagent-wrapper --version + codeagent-wrapper --help + +Parallel mode examples: + 
codeagent-wrapper --parallel < tasks.txt + echo '...' | codeagent-wrapper --parallel + codeagent-wrapper --parallel <<'EOF' + +Environment Variables: + CODEX_TIMEOUT Timeout in milliseconds (default: 7200000) + +Exit Codes: + 0 Success + 1 General error (missing args, no output) + 124 Timeout + 127 backend command not found + 130 Interrupted (Ctrl+C) + * Passthrough from backend process` + fmt.Println(help) +} diff --git a/codex-wrapper/main_integration_test.go b/codeagent-wrapper/main_integration_test.go similarity index 97% rename from codex-wrapper/main_integration_test.go rename to codeagent-wrapper/main_integration_test.go index e5153d8..987b646 100644 --- a/codex-wrapper/main_integration_test.go +++ b/codeagent-wrapper/main_integration_test.go @@ -137,7 +137,7 @@ id: E ---CONTENT--- task-e` stdinReader = bytes.NewReader([]byte(input)) - os.Args = []string{"codex-wrapper", "--parallel"} + os.Args = []string{"codeagent-wrapper", "--parallel"} var mu sync.Mutex starts := make(map[string]time.Time) @@ -240,7 +240,7 @@ dependencies: A ---CONTENT--- b` stdinReader = bytes.NewReader([]byte(input)) - os.Args = []string{"codex-wrapper", "--parallel"} + os.Args = []string{"codeagent-wrapper", "--parallel"} exitCode := 0 output := captureStdout(t, func() { @@ -288,7 +288,7 @@ id: E ---CONTENT--- ok-e` stdinReader = bytes.NewReader([]byte(input)) - os.Args = []string{"codex-wrapper", "--parallel"} + os.Args = []string{"codeagent-wrapper", "--parallel"} var exitCode int output := captureStdout(t, func() { @@ -340,7 +340,7 @@ id: T ---CONTENT--- slow` stdinReader = bytes.NewReader([]byte(input)) - os.Args = []string{"codex-wrapper", "--parallel"} + os.Args = []string{"codeagent-wrapper", "--parallel"} exitCode := 0 output := captureStdout(t, func() { diff --git a/codex-wrapper/main_test.go b/codeagent-wrapper/main_test.go similarity index 70% rename from codex-wrapper/main_test.go rename to codeagent-wrapper/main_test.go index a496baa..c1b5f01 100644 --- 
a/codex-wrapper/main_test.go +++ b/codeagent-wrapper/main_test.go @@ -1,6 +1,7 @@ package main import ( + "bufio" "bytes" "context" "encoding/json" @@ -26,6 +27,7 @@ func resetTestHooks() { codexCommand = "codex" cleanupHook = nil buildCodexArgsFn = buildCodexArgs + selectBackendFn = selectBackend commandContext = exec.CommandContext jsonMarshal = json.Marshal forceKillDelay = 5 @@ -47,6 +49,41 @@ func (e errReader) Read([]byte) (int, error) { return 0, e.err } +type testBackend struct { + name string + command string + argsFn func(*Config, string) []string +} + +func (t testBackend) Name() string { + if t.name != "" { + return t.name + } + return "test-backend" +} + +func (t testBackend) BuildArgs(cfg *Config, targetArg string) []string { + if t.argsFn != nil { + return t.argsFn(cfg, targetArg) + } + return []string{targetArg} +} + +func (t testBackend) Command() string { + if t.command != "" { + return t.command + } + return "echo" +} + +func withBackend(command string, argsFn func(*Config, string) []string) func() { + prev := selectBackendFn + selectBackendFn = func(name string) (Backend, error) { + return testBackend{name: name, command: command, argsFn: argsFn}, nil + } + return func() { selectBackendFn = prev } +} + func captureStdoutPipe() *capturedStdout { r, w, _ := os.Pipe() state := &capturedStdout{old: os.Stdout, reader: r, writer: w} @@ -106,25 +143,25 @@ func TestRunParseArgs_NewMode(t *testing.T) { }{ { name: "simple task", - args: []string{"codex-wrapper", "analyze code"}, - want: &Config{Mode: "new", Task: "analyze code", WorkDir: ".", ExplicitStdin: false}, + args: []string{"codeagent-wrapper", "analyze code"}, + want: &Config{Mode: "new", Task: "analyze code", WorkDir: ".", ExplicitStdin: false, Backend: defaultBackendName}, }, { name: "task with workdir", - args: []string{"codex-wrapper", "analyze code", "/path/to/dir"}, - want: &Config{Mode: "new", Task: "analyze code", WorkDir: "/path/to/dir", ExplicitStdin: false}, + args: 
[]string{"codeagent-wrapper", "analyze code", "/path/to/dir"}, + want: &Config{Mode: "new", Task: "analyze code", WorkDir: "/path/to/dir", ExplicitStdin: false, Backend: defaultBackendName}, }, { name: "explicit stdin mode", - args: []string{"codex-wrapper", "-"}, - want: &Config{Mode: "new", Task: "-", WorkDir: ".", ExplicitStdin: true}, + args: []string{"codeagent-wrapper", "-"}, + want: &Config{Mode: "new", Task: "-", WorkDir: ".", ExplicitStdin: true, Backend: defaultBackendName}, }, { name: "stdin with workdir", - args: []string{"codex-wrapper", "-", "/some/dir"}, - want: &Config{Mode: "new", Task: "-", WorkDir: "/some/dir", ExplicitStdin: true}, + args: []string{"codeagent-wrapper", "-", "/some/dir"}, + want: &Config{Mode: "new", Task: "-", WorkDir: "/some/dir", ExplicitStdin: true, Backend: defaultBackendName}, }, - {name: "no args", args: []string{"codex-wrapper"}, wantErr: true}, + {name: "no args", args: []string{"codeagent-wrapper"}, wantErr: true}, } for _, tt := range tests { @@ -152,6 +189,9 @@ func TestRunParseArgs_NewMode(t *testing.T) { if cfg.ExplicitStdin != tt.want.ExplicitStdin { t.Errorf("ExplicitStdin = %v, want %v", cfg.ExplicitStdin, tt.want.ExplicitStdin) } + if cfg.Backend != tt.want.Backend { + t.Errorf("Backend = %v, want %v", cfg.Backend, tt.want.Backend) + } }) } } @@ -165,21 +205,21 @@ func TestRunParseArgs_ResumeMode(t *testing.T) { }{ { name: "resume with task", - args: []string{"codex-wrapper", "resume", "session-123", "continue task"}, - want: &Config{Mode: "resume", SessionID: "session-123", Task: "continue task", WorkDir: ".", ExplicitStdin: false}, + args: []string{"codeagent-wrapper", "resume", "session-123", "continue task"}, + want: &Config{Mode: "resume", SessionID: "session-123", Task: "continue task", WorkDir: ".", ExplicitStdin: false, Backend: defaultBackendName}, }, { name: "resume with workdir", - args: []string{"codex-wrapper", "resume", "session-456", "task", "/work"}, - want: &Config{Mode: "resume", SessionID: 
"session-456", Task: "task", WorkDir: "/work", ExplicitStdin: false}, + args: []string{"codeagent-wrapper", "resume", "session-456", "task", "/work"}, + want: &Config{Mode: "resume", SessionID: "session-456", Task: "task", WorkDir: "/work", ExplicitStdin: false, Backend: defaultBackendName}, }, { name: "resume with stdin", - args: []string{"codex-wrapper", "resume", "session-789", "-"}, - want: &Config{Mode: "resume", SessionID: "session-789", Task: "-", WorkDir: ".", ExplicitStdin: true}, + args: []string{"codeagent-wrapper", "resume", "session-789", "-"}, + want: &Config{Mode: "resume", SessionID: "session-789", Task: "-", WorkDir: ".", ExplicitStdin: true, Backend: defaultBackendName}, }, - {name: "resume missing session_id", args: []string{"codex-wrapper", "resume"}, wantErr: true}, - {name: "resume missing task", args: []string{"codex-wrapper", "resume", "session-123"}, wantErr: true}, + {name: "resume missing session_id", args: []string{"codeagent-wrapper", "resume"}, wantErr: true}, + {name: "resume missing task", args: []string{"codeagent-wrapper", "resume", "session-123"}, wantErr: true}, } for _, tt := range tests { @@ -198,6 +238,63 @@ func TestRunParseArgs_ResumeMode(t *testing.T) { if cfg.Mode != tt.want.Mode || cfg.SessionID != tt.want.SessionID || cfg.Task != tt.want.Task || cfg.WorkDir != tt.want.WorkDir || cfg.ExplicitStdin != tt.want.ExplicitStdin { t.Errorf("parseArgs() mismatch: %+v vs %+v", cfg, tt.want) } + if cfg.Backend != tt.want.Backend { + t.Errorf("Backend = %v, want %v", cfg.Backend, tt.want.Backend) + } + }) + } +} + +func TestRunParseArgs_BackendFlag(t *testing.T) { + tests := []struct { + name string + args []string + want string + wantErr bool + }{ + { + name: "claude backend", + args: []string{"codeagent-wrapper", "--backend", "claude", "task"}, + want: "claude", + }, + { + name: "gemini resume", + args: []string{"codeagent-wrapper", "--backend", "gemini", "resume", "sid", "task"}, + want: "gemini", + }, + { + name: "backend equals 
syntax", + args: []string{"codeagent-wrapper", "--backend=claude", "task"}, + want: "claude", + }, + { + name: "missing backend value", + args: []string{"codeagent-wrapper", "--backend"}, + wantErr: true, + }, + { + name: "backend equals missing value", + args: []string{"codeagent-wrapper", "--backend=", "task"}, + wantErr: true, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + os.Args = tt.args + cfg, err := parseArgs() + if tt.wantErr { + if err == nil { + t.Fatalf("expected error, got nil") + } + return + } + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if cfg.Backend != tt.want { + t.Fatalf("Backend = %q, want %q", cfg.Backend, tt.want) + } }) } } @@ -349,6 +446,173 @@ func TestRunBuildCodexArgs_ResumeMode(t *testing.T) { } } +func TestSelectBackend(t *testing.T) { + tests := []struct { + name string + in string + kind Backend + }{ + {"codex", "codex", CodexBackend{}}, + {"claude mixed case", "ClAuDe", ClaudeBackend{}}, + {"gemini", "gemini", GeminiBackend{}}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got, err := selectBackend(tt.in) + if err != nil { + t.Fatalf("selectBackend() error = %v", err) + } + switch tt.kind.(type) { + case CodexBackend: + if _, ok := got.(CodexBackend); !ok { + t.Fatalf("expected CodexBackend, got %T", got) + } + case ClaudeBackend: + if _, ok := got.(ClaudeBackend); !ok { + t.Fatalf("expected ClaudeBackend, got %T", got) + } + case GeminiBackend: + if _, ok := got.(GeminiBackend); !ok { + t.Fatalf("expected GeminiBackend, got %T", got) + } + } + }) + } +} + +func TestSelectBackend_Invalid(t *testing.T) { + if _, err := selectBackend("unknown"); err == nil { + t.Fatalf("expected error for invalid backend") + } +} + +func TestSelectBackend_DefaultOnEmpty(t *testing.T) { + backend, err := selectBackend("") + if err != nil { + t.Fatalf("selectBackend(\"\") error = %v", err) + } + if _, ok := backend.(CodexBackend); !ok { + t.Fatalf("expected default 
CodexBackend, got %T", backend) + } +} + +func TestBackendBuildArgs_CodexBackend(t *testing.T) { + backend := CodexBackend{} + cfg := &Config{Mode: "new", WorkDir: "/test/dir"} + got := backend.BuildArgs(cfg, "task") + want := []string{"e", "--skip-git-repo-check", "-C", "/test/dir", "--json", "task"} + if len(got) != len(want) { + t.Fatalf("length mismatch") + } + for i := range want { + if got[i] != want[i] { + t.Fatalf("index %d got %s want %s", i, got[i], want[i]) + } + } +} + +func TestBackendBuildArgs_ClaudeBackend(t *testing.T) { + backend := ClaudeBackend{} + cfg := &Config{Mode: "new", WorkDir: defaultWorkdir} + got := backend.BuildArgs(cfg, "todo") + want := []string{"-p", "--dangerously-skip-permissions", "--output-format", "stream-json", "--verbose", "todo"} + if len(got) != len(want) { + t.Fatalf("length mismatch") + } + for i := range want { + if got[i] != want[i] { + t.Fatalf("index %d got %s want %s", i, got[i], want[i]) + } + } + + if backend.BuildArgs(nil, "ignored") != nil { + t.Fatalf("nil config should return nil args") + } +} + +func TestClaudeBackendBuildArgs_OutputValidation(t *testing.T) { + backend := ClaudeBackend{} + cfg := &Config{Mode: "resume"} + target := "ensure-flags" + + args := backend.BuildArgs(cfg, target) + expectedPrefix := []string{"-p", "--dangerously-skip-permissions", "--output-format", "stream-json", "--verbose"} + + if len(args) != len(expectedPrefix)+1 { + t.Fatalf("args length=%d, want %d", len(args), len(expectedPrefix)+1) + } + for i, val := range expectedPrefix { + if args[i] != val { + t.Fatalf("args[%d]=%q, want %q", i, args[i], val) + } + } + if args[len(args)-1] != target { + t.Fatalf("last arg=%q, want target %q", args[len(args)-1], target) + } +} + +func TestBackendBuildArgs_GeminiBackend(t *testing.T) { + backend := GeminiBackend{} + cfg := &Config{Mode: "new"} + got := backend.BuildArgs(cfg, "task") + want := []string{"-o", "stream-json", "-y", "-p", "task"} + if len(got) != len(want) { + t.Fatalf("length 
mismatch") + } + for i := range want { + if got[i] != want[i] { + t.Fatalf("index %d got %s want %s", i, got[i], want[i]) + } + } + + if backend.BuildArgs(nil, "ignored") != nil { + t.Fatalf("nil config should return nil args") + } +} + +func TestGeminiBackendBuildArgs_OutputValidation(t *testing.T) { + backend := GeminiBackend{} + cfg := &Config{Mode: "resume"} + target := "prompt-data" + + args := backend.BuildArgs(cfg, target) + expected := []string{"-o", "stream-json", "-y", "-p"} + + if len(args) != len(expected)+1 { + t.Fatalf("args length=%d, want %d", len(args), len(expected)+1) + } + for i, val := range expected { + if args[i] != val { + t.Fatalf("args[%d]=%q, want %q", i, args[i], val) + } + } + if args[len(args)-1] != target { + t.Fatalf("last arg=%q, want target %q", args[len(args)-1], target) + } +} + +func TestBackendNamesAndCommands(t *testing.T) { + tests := []Backend{CodexBackend{}, ClaudeBackend{}, GeminiBackend{}} + expected := []struct { + name string + command string + }{ + {"codex", "codex"}, + {"claude", "claude"}, + {"gemini", "gemini"}, + } + + for i, backend := range tests { + if backend.Name() != expected[i].name { + t.Fatalf("backend %d name = %s, want %s", i, backend.Name(), expected[i].name) + } + if backend.Command() != expected[i].command { + t.Fatalf("backend %d command = %s, want %s", i, backend.Command(), expected[i].command) + } + } +} + func TestRunResolveTimeout(t *testing.T) { tests := []struct { name string @@ -442,6 +706,36 @@ func TestParseJSONStream(t *testing.T) { } } +func TestParseJSONStream_ClaudeEvents(t *testing.T) { + input := `{"type":"system","subtype":"init","session_id":"abc123"} +{"type":"result","subtype":"success","result":"Hello!","session_id":"abc123"}` + + message, threadID := parseJSONStream(strings.NewReader(input)) + + if message != "Hello!" 
{ + t.Fatalf("message=%q, want %q", message, "Hello!") + } + if threadID != "abc123" { + t.Fatalf("threadID=%q, want %q", threadID, "abc123") + } +} + +func TestParseJSONStream_GeminiEvents(t *testing.T) { + input := `{"type":"init","session_id":"xyz789"} +{"type":"message","role":"assistant","content":"Hi","delta":true,"session_id":"xyz789"} +{"type":"message","role":"assistant","content":" there","delta":true} +{"type":"result","status":"success","session_id":"xyz789"}` + + message, threadID := parseJSONStream(strings.NewReader(input)) + + if message != "Hi there" { + t.Fatalf("message=%q, want %q", message, "Hi there") + } + if threadID != "xyz789" { + t.Fatalf("threadID=%q, want %q", threadID, "xyz789") + } +} + func TestParseJSONStreamWithWarn_InvalidLine(t *testing.T) { var warnings []string warnFn := func(msg string) { warnings = append(warnings, msg) } @@ -454,6 +748,36 @@ func TestParseJSONStreamWithWarn_InvalidLine(t *testing.T) { } } +func TestDiscardInvalidJSON(t *testing.T) { + reader := bufio.NewReader(strings.NewReader("line1\nline2\n")) + newReader, err := discardInvalidJSON(nil, reader) + if err != nil && !errors.Is(err, io.EOF) { + t.Fatalf("unexpected error: %v", err) + } + line, _ := newReader.ReadString('\n') + if strings.TrimSpace(line) != "line2" { + t.Fatalf("expected to continue with remaining data, got %q", line) + } + + readerNoNewline := bufio.NewReader(strings.NewReader("no newline")) + if _, err := discardInvalidJSON(nil, readerNoNewline); err == nil { + t.Fatalf("expected error when no newline present") + } +} + +func TestHasKey(t *testing.T) { + raw := map[string]json.RawMessage{ + "present": json.RawMessage(`true`), + } + + if !hasKey(raw, "present") { + t.Fatalf("expected key 'present' to be found") + } + if hasKey(raw, "absent") { + t.Fatalf("did not expect key 'absent' to be found") + } +} + func TestRunGetEnv(t *testing.T) { tests := []struct { name string @@ -496,6 +820,7 @@ func TestRunTruncate(t *testing.T) { {"truncate", 
"hello world", 5, "hello..."}, {"empty", "", 5, ""}, {"zero maxLen", "hello", 0, "..."}, + {"negative maxLen", "hello", -1, ""}, } for _, tt := range tests { @@ -522,6 +847,26 @@ func TestRunMin(t *testing.T) { } } +func TestTailBufferWrite(t *testing.T) { + buf := &tailBuffer{limit: 5} + if n, _ := buf.Write([]byte("123")); n != 3 || buf.String() != "123" { + t.Fatalf("unexpected buffer content %q", buf.String()) + } + + if _, _ = buf.Write([]byte("4567")); buf.String() != "34567" { + t.Fatalf("overflow case mismatch, got %q", buf.String()) + } + + if _, _ = buf.Write([]byte("abcdefgh")); buf.String() != "defgh" { + t.Fatalf("len>=limit case mismatch, got %q", buf.String()) + } + + noLimit := &tailBuffer{limit: 0} + if _, _ = noLimit.Write([]byte("ignored")); noLimit.String() != "" { + t.Fatalf("limit<=0 should not retain data") + } +} + func TestRunLogFunctions(t *testing.T) { defer resetTestHooks() tempDir := t.TempDir() @@ -556,6 +901,64 @@ func TestRunLogFunctions(t *testing.T) { } } +func TestLoggerPathAndRemoveNil(t *testing.T) { + var logger *Logger + if logger.Path() != "" { + t.Fatalf("nil logger path should be empty") + } + if err := logger.RemoveLogFile(); err != nil { + t.Fatalf("expected nil logger RemoveLogFile to be no-op, got %v", err) + } +} + +func TestLoggerLogDropOnDone(t *testing.T) { + logger := &Logger{ + ch: make(chan logEntry), + done: make(chan struct{}), + } + close(logger.done) + logger.log("INFO", "dropped") + logger.pendingWG.Wait() +} + +func TestLoggerLogAfterClose(t *testing.T) { + defer resetTestHooks() + logger, err := NewLogger() + if err != nil { + t.Fatalf("NewLogger error: %v", err) + } + if err := logger.Close(); err != nil { + t.Fatalf("Close error: %v", err) + } + logger.log("INFO", "should be ignored") +} + +func TestLogWriterLogLine(t *testing.T) { + defer resetTestHooks() + logger, err := NewLogger() + if err != nil { + t.Fatalf("NewLogger error: %v", err) + } + setLogger(logger) + lw := &logWriter{prefix: "P:", maxLen: 
3} + lw.buf.WriteString("abcdef") + lw.logLine(false) + lw.logLine(false) // empty buffer path + logger.Flush() + data, _ := os.ReadFile(logger.Path()) + if !strings.Contains(string(data), "P:abc") { + t.Fatalf("log output missing truncated entry, got %q", string(data)) + } + closeLogger() +} + +func TestNewLogWriterDefaultMaxLen(t *testing.T) { + lw := newLogWriter("X:", 0) + if lw.maxLen != codexLogLineLimit { + t.Fatalf("expected default maxLen %d, got %d", codexLogLineLimit, lw.maxLen) + } +} + func TestRunPrintHelp(t *testing.T) { oldStdout := os.Stdout r, w, _ := os.Pipe() @@ -568,7 +971,7 @@ func TestRunPrintHelp(t *testing.T) { io.Copy(&buf, r) output := buf.String() - expected := []string{"codex-wrapper", "Usage:", "resume", "CODEX_TIMEOUT", "Exit Codes:"} + expected := []string{"codeagent-wrapper", "Usage:", "resume", "CODEX_TIMEOUT", "Exit Codes:"} for _, phrase := range expected { if !strings.Contains(output, phrase) { t.Errorf("printHelp() missing phrase %q", phrase) @@ -656,7 +1059,7 @@ func TestRunCodexTask_StartError(t *testing.T) { buildCodexArgsFn = func(cfg *Config, targetArg string) []string { return []string{} } res := runCodexTask(TaskSpec{Task: "task"}, false, 1) - if res.ExitCode != 1 || !strings.Contains(res.Error, "failed to start codex") { + if res.ExitCode != 1 || !strings.Contains(res.Error, "failed to start") { t.Fatalf("unexpected result: %+v", res) } } @@ -697,6 +1100,22 @@ func TestRunCodexTask_WithStdin(t *testing.T) { } } +func TestRunCodexProcess_WithStdin(t *testing.T) { + defer resetTestHooks() + codexCommand = "cat" + jsonOutput := `{"type":"thread.started","thread_id":"proc"}` + jsonOutput += "\n" + jsonOutput += `{"type":"item.completed","item":{"type":"agent_message","text":"ok"}}` + + msg, tid, exit := runCodexProcess(context.Background(), []string{}, jsonOutput, true, 5) + if exit != 0 { + t.Fatalf("exit code %d, want 0", exit) + } + if msg != "ok" || tid != "proc" { + t.Fatalf("unexpected output msg=%q tid=%q", msg, tid) 
+ } +} + func TestRunCodexTask_ExitError(t *testing.T) { defer resetTestHooks() codexCommand = "false" @@ -764,6 +1183,34 @@ func TestRunCodexTask_SignalHandling(t *testing.T) { } } +func TestForwardSignals_ContextCancel(t *testing.T) { + ctx, cancel := context.WithCancel(context.Background()) + defer cancel() + forwardSignals(ctx, &exec.Cmd{}, func(string) {}) + cancel() + time.Sleep(10 * time.Millisecond) +} + +func TestCancelReason(t *testing.T) { + if got := cancelReason(nil); got != "Context cancelled" { + t.Fatalf("cancelReason(nil) = %q, want %q", got, "Context cancelled") + } + + ctxTimeout, cancelTimeout := context.WithTimeout(context.Background(), 1*time.Nanosecond) + defer cancelTimeout() + <-ctxTimeout.Done() + wantTimeout := fmt.Sprintf("%s execution timeout", codexCommand) + if got := cancelReason(ctxTimeout); got != wantTimeout { + t.Fatalf("cancelReason(deadline) = %q, want %q", got, wantTimeout) + } + + ctxCancelled, cancel := context.WithCancel(context.Background()) + cancel() + if got := cancelReason(ctxCancelled); got != "Execution cancelled, terminating codex process" { + t.Fatalf("cancelReason(cancelled) = %q, want %q", got, "Execution cancelled, terminating codex process") + } +} + func TestSilentMode(t *testing.T) { defer resetTestHooks() jsonOutput := `{"type":"thread.started","thread_id":"silent-session"} @@ -1042,7 +1489,7 @@ func TestRun_ParallelFlag(t *testing.T) { oldArgs := os.Args defer func() { os.Args = oldArgs }() - os.Args = []string{"codex-wrapper", "--parallel"} + os.Args = []string{"codeagent-wrapper", "--parallel"} jsonInput := `---TASK--- id: T1 ---CONTENT--- @@ -1065,7 +1512,7 @@ test` func TestRun_Version(t *testing.T) { defer resetTestHooks() - os.Args = []string{"codex-wrapper", "--version"} + os.Args = []string{"codeagent-wrapper", "--version"} if code := run(); code != 0 { t.Errorf("exit = %d, want 0", code) } @@ -1073,7 +1520,7 @@ func TestRun_Version(t *testing.T) { func TestRun_VersionShort(t *testing.T) { defer 
resetTestHooks() - os.Args = []string{"codex-wrapper", "-v"} + os.Args = []string{"codeagent-wrapper", "-v"} if code := run(); code != 0 { t.Errorf("exit = %d, want 0", code) } @@ -1081,7 +1528,7 @@ func TestRun_VersionShort(t *testing.T) { func TestRun_Help(t *testing.T) { defer resetTestHooks() - os.Args = []string{"codex-wrapper", "--help"} + os.Args = []string{"codeagent-wrapper", "--help"} if code := run(); code != 0 { t.Errorf("exit = %d, want 0", code) } @@ -1089,7 +1536,7 @@ func TestRun_Help(t *testing.T) { func TestRun_HelpShort(t *testing.T) { defer resetTestHooks() - os.Args = []string{"codex-wrapper", "-h"} + os.Args = []string{"codeagent-wrapper", "-h"} if code := run(); code != 0 { t.Errorf("exit = %d, want 0", code) } @@ -1097,7 +1544,7 @@ func TestRun_HelpShort(t *testing.T) { func TestRun_NoArgs(t *testing.T) { defer resetTestHooks() - os.Args = []string{"codex-wrapper"} + os.Args = []string{"codeagent-wrapper"} if code := run(); code != 1 { t.Errorf("exit = %d, want 1", code) } @@ -1105,7 +1552,7 @@ func TestRun_NoArgs(t *testing.T) { func TestRun_ExplicitStdinEmpty(t *testing.T) { defer resetTestHooks() - os.Args = []string{"codex-wrapper", "-"} + os.Args = []string{"codeagent-wrapper", "-"} stdinReader = strings.NewReader("") isTerminalFn = func() bool { return false } if code := run(); code != 1 { @@ -1117,7 +1564,7 @@ func TestRun_ExplicitStdinReadError(t *testing.T) { defer resetTestHooks() tempDir := t.TempDir() t.Setenv("TMPDIR", tempDir) - logPath := filepath.Join(tempDir, fmt.Sprintf("codex-wrapper-%d.log", os.Getpid())) + logPath := filepath.Join(tempDir, fmt.Sprintf("codeagent-wrapper-%d.log", os.Getpid())) var logOutput string cleanupHook = func() { @@ -1127,7 +1574,7 @@ func TestRun_ExplicitStdinReadError(t *testing.T) { } } - os.Args = []string{"codex-wrapper", "-"} + os.Args = []string{"codeagent-wrapper", "-"} stdinReader = errReader{errors.New("broken stdin")} isTerminalFn = func() bool { return false } @@ -1147,23 +1594,35 @@ 
func TestRun_ExplicitStdinReadError(t *testing.T) { func TestRun_CommandFails(t *testing.T) { defer resetTestHooks() - os.Args = []string{"codex-wrapper", "task"} + os.Args = []string{"codeagent-wrapper", "task"} stdinReader = strings.NewReader("") isTerminalFn = func() bool { return true } - codexCommand = "false" + restore := withBackend("false", func(cfg *Config, targetArg string) []string { return []string{} }) + defer restore() if code := run(); code == 0 { t.Errorf("expected non-zero") } } +func TestRun_InvalidBackend(t *testing.T) { + defer resetTestHooks() + os.Args = []string{"codeagent-wrapper", "--backend", "unknown", "task"} + stdinReader = strings.NewReader("") + isTerminalFn = func() bool { return true } + if code := run(); code == 0 { + t.Fatalf("expected non-zero exit for invalid backend") + } +} + func TestRun_SuccessfulExecution(t *testing.T) { defer resetTestHooks() stdout := captureStdoutPipe() - codexCommand = createFakeCodexScript(t, "tid-123", "ok") + restore := withBackend(createFakeCodexScript(t, "tid-123", "ok"), buildCodexArgs) + defer restore() stdinReader = strings.NewReader("") isTerminalFn = func() bool { return true } - os.Args = []string{"codex-wrapper", "task"} + os.Args = []string{"codeagent-wrapper", "task"} exitCode := run() if exitCode != 0 { @@ -1181,10 +1640,11 @@ func TestRun_ExplicitStdinSuccess(t *testing.T) { defer resetTestHooks() stdout := captureStdoutPipe() - codexCommand = createFakeCodexScript(t, "tid-stdin", "from-stdin") + restore := withBackend(createFakeCodexScript(t, "tid-stdin", "from-stdin"), buildCodexArgs) + defer restore() stdinReader = strings.NewReader("line1\nline2") isTerminalFn = func() bool { return false } - os.Args = []string{"codex-wrapper", "-"} + os.Args = []string{"codeagent-wrapper", "-"} exitCode := run() restoreStdoutPipe(stdout) @@ -1201,7 +1661,7 @@ func TestRun_PipedTaskReadError(t *testing.T) { defer resetTestHooks() tempDir := t.TempDir() t.Setenv("TMPDIR", tempDir) - logPath := 
filepath.Join(tempDir, fmt.Sprintf("codex-wrapper-%d.log", os.Getpid())) + logPath := filepath.Join(tempDir, fmt.Sprintf("codeagent-wrapper-%d.log", os.Getpid())) var logOutput string cleanupHook = func() { @@ -1211,10 +1671,11 @@ func TestRun_PipedTaskReadError(t *testing.T) { } } - codexCommand = createFakeCodexScript(t, "tid-pipe", "piped-task") + restore := withBackend(createFakeCodexScript(t, "tid-pipe", "piped-task"), buildCodexArgs) + defer restore() isTerminalFn = func() bool { return false } stdinReader = errReader{errors.New("pipe failure")} - os.Args = []string{"codex-wrapper", "cli-task"} + os.Args = []string{"codeagent-wrapper", "cli-task"} exitCode := run() if exitCode != 1 { @@ -1233,10 +1694,11 @@ func TestRun_PipedTaskSuccess(t *testing.T) { defer resetTestHooks() stdout := captureStdoutPipe() - codexCommand = createFakeCodexScript(t, "tid-pipe", "piped-task") + restore := withBackend(createFakeCodexScript(t, "tid-pipe", "piped-task"), buildCodexArgs) + defer restore() isTerminalFn = func() bool { return false } stdinReader = strings.NewReader("piped task text") - os.Args = []string{"codex-wrapper", "cli-task"} + os.Args = []string{"codeagent-wrapper", "cli-task"} exitCode := run() restoreStdoutPipe(stdout) @@ -1253,14 +1715,15 @@ func TestRun_LoggerLifecycle(t *testing.T) { defer resetTestHooks() tempDir := t.TempDir() t.Setenv("TMPDIR", tempDir) - logPath := filepath.Join(tempDir, fmt.Sprintf("codex-wrapper-%d.log", os.Getpid())) + logPath := filepath.Join(tempDir, fmt.Sprintf("codeagent-wrapper-%d.log", os.Getpid())) stdout := captureStdoutPipe() - codexCommand = createFakeCodexScript(t, "tid-logger", "ok") + restore := withBackend(createFakeCodexScript(t, "tid-logger", "ok"), buildCodexArgs) + defer restore() isTerminalFn = func() bool { return true } stdinReader = strings.NewReader("") - os.Args = []string{"codex-wrapper", "task"} + os.Args = []string{"codeagent-wrapper", "task"} var fileExisted bool cleanupHook = func() { @@ -1297,7 +1760,7 
@@ func TestRun_LoggerRemovedOnSignal(t *testing.T) { tempDir := t.TempDir() t.Setenv("TMPDIR", tempDir) - logPath := filepath.Join(tempDir, fmt.Sprintf("codex-wrapper-%d.log", os.Getpid())) + logPath := filepath.Join(tempDir, fmt.Sprintf("codeagent-wrapper-%d.log", os.Getpid())) scriptPath := filepath.Join(tempDir, "sleepy-codex.sh") script := `#!/bin/sh @@ -1308,10 +1771,11 @@ printf '%s\n' '{"type":"item.completed","item":{"type":"agent_message","text":"l t.Fatalf("failed to write script: %v", err) } - codexCommand = scriptPath + restore := withBackend(scriptPath, buildCodexArgs) + defer restore() isTerminalFn = func() bool { return true } stdinReader = strings.NewReader("") - os.Args = []string{"codex-wrapper", "task"} + os.Args = []string{"codeagent-wrapper", "task"} exitCh := make(chan int, 1) go func() { exitCh <- run() }() @@ -1347,10 +1811,12 @@ func TestRun_CleanupHookAlwaysCalled(t *testing.T) { called := false cleanupHook = func() { called = true } // Use a command that goes through normal flow, not --version which returns early - codexCommand = "echo" - buildCodexArgsFn = func(cfg *Config, targetArg string) []string { return []string{`{"type":"thread.started","thread_id":"x"} -{"type":"item.completed","item":{"type":"agent_message","text":"ok"}}`} } - os.Args = []string{"codex-wrapper", "task"} + restore := withBackend("echo", func(cfg *Config, targetArg string) []string { + return []string{`{"type":"thread.started","thread_id":"x"} +{"type":"item.completed","item":{"type":"agent_message","text":"ok"}}`} + }) + defer restore() + os.Args = []string{"codeagent-wrapper", "task"} if exitCode := run(); exitCode != 0 { t.Fatalf("exit = %d, want 0", exitCode) } @@ -1403,14 +1869,14 @@ func TestFarewellEmpty(t *testing.T) { func TestRun_CLI_Success(t *testing.T) { defer resetTestHooks() - os.Args = []string{"codex-wrapper", "do-things"} + os.Args = []string{"codeagent-wrapper", "do-things"} stdinReader = strings.NewReader("") isTerminalFn = func() bool { 
return true } - codexCommand = "echo" - buildCodexArgsFn = func(cfg *Config, targetArg string) []string { + restore := withBackend("echo", func(cfg *Config, targetArg string) []string { return []string{`{"type":"thread.started","thread_id":"cli-session"}` + "\n" + `{"type":"item.completed","item":{"type":"agent_message","text":"ok"}}`} - } + }) + defer restore() var exitCode int output := captureOutput(t, func() { exitCode = run() }) diff --git a/codeagent-wrapper/parser.go b/codeagent-wrapper/parser.go new file mode 100644 index 0000000..1e794da --- /dev/null +++ b/codeagent-wrapper/parser.go @@ -0,0 +1,241 @@ +package main + +import ( + "bufio" + "bytes" + "encoding/json" + "errors" + "fmt" + "io" + "strings" +) + +// JSONEvent represents a Codex JSON output event +type JSONEvent struct { + Type string `json:"type"` + ThreadID string `json:"thread_id,omitempty"` + Item *EventItem `json:"item,omitempty"` +} + +// EventItem represents the item field in a JSON event +type EventItem struct { + Type string `json:"type"` + Text interface{} `json:"text"` +} + +// ClaudeEvent for Claude stream-json format +type ClaudeEvent struct { + Type string `json:"type"` + Subtype string `json:"subtype,omitempty"` + SessionID string `json:"session_id,omitempty"` + Result string `json:"result,omitempty"` +} + +// GeminiEvent for Gemini stream-json format +type GeminiEvent struct { + Type string `json:"type"` + SessionID string `json:"session_id,omitempty"` + Role string `json:"role,omitempty"` + Content string `json:"content,omitempty"` + Delta bool `json:"delta,omitempty"` + Status string `json:"status,omitempty"` +} + +func parseJSONStream(r io.Reader) (message, threadID string) { + return parseJSONStreamWithLog(r, logWarn, logInfo) +} + +func parseJSONStreamWithWarn(r io.Reader, warnFn func(string)) (message, threadID string) { + return parseJSONStreamWithLog(r, warnFn, logInfo) +} + +func parseJSONStreamWithLog(r io.Reader, warnFn func(string), infoFn func(string)) (message, 
threadID string) { + scanner := bufio.NewScanner(r) + scanner.Buffer(make([]byte, 64*1024), 10*1024*1024) + + if warnFn == nil { + warnFn = func(string) {} + } + if infoFn == nil { + infoFn = func(string) {} + } + + totalEvents := 0 + + var ( + codexMessage string + claudeMessage string + geminiBuffer strings.Builder + ) + + for scanner.Scan() { + line := strings.TrimSpace(scanner.Text()) + if line == "" { + continue + } + totalEvents++ + + var raw map[string]json.RawMessage + if err := json.Unmarshal([]byte(line), &raw); err != nil { + warnFn(fmt.Sprintf("Failed to parse line: %s", truncate(line, 100))) + continue + } + + hasItemType := false + if rawItem, ok := raw["item"]; ok { + var itemMap map[string]json.RawMessage + if err := json.Unmarshal(rawItem, &itemMap); err == nil { + if _, ok := itemMap["type"]; ok { + hasItemType = true + } + } + } + + isCodex := hasItemType + if !isCodex { + if _, ok := raw["thread_id"]; ok { + isCodex = true + } + } + + switch { + case isCodex: + var event JSONEvent + if err := json.Unmarshal([]byte(line), &event); err != nil { + warnFn(fmt.Sprintf("Failed to parse Codex event: %s", truncate(line, 100))) + continue + } + + var details []string + if event.ThreadID != "" { + details = append(details, fmt.Sprintf("thread_id=%s", event.ThreadID)) + } + if event.Item != nil && event.Item.Type != "" { + details = append(details, fmt.Sprintf("item_type=%s", event.Item.Type)) + } + if len(details) > 0 { + infoFn(fmt.Sprintf("Parsed event #%d type=%s (%s)", totalEvents, event.Type, strings.Join(details, ", "))) + } else { + infoFn(fmt.Sprintf("Parsed event #%d type=%s", totalEvents, event.Type)) + } + + switch event.Type { + case "thread.started": + threadID = event.ThreadID + infoFn(fmt.Sprintf("thread.started event thread_id=%s", threadID)) + case "item.completed": + var itemType string + var normalized string + if event.Item != nil { + itemType = event.Item.Type + normalized = normalizeText(event.Item.Text) + } + 
infoFn(fmt.Sprintf("item.completed event item_type=%s message_len=%d", itemType, len(normalized))) + if event.Item != nil && event.Item.Type == "agent_message" && normalized != "" { + codexMessage = normalized + } + } + + case hasKey(raw, "subtype") || hasKey(raw, "result"): + var event ClaudeEvent + if err := json.Unmarshal([]byte(line), &event); err != nil { + warnFn(fmt.Sprintf("Failed to parse Claude event: %s", truncate(line, 100))) + continue + } + + if event.SessionID != "" && threadID == "" { + threadID = event.SessionID + } + + infoFn(fmt.Sprintf("Parsed Claude event #%d type=%s subtype=%s result_len=%d", totalEvents, event.Type, event.Subtype, len(event.Result))) + + if event.Result != "" { + claudeMessage = event.Result + } + + case hasKey(raw, "role") || hasKey(raw, "delta"): + var event GeminiEvent + if err := json.Unmarshal([]byte(line), &event); err != nil { + warnFn(fmt.Sprintf("Failed to parse Gemini event: %s", truncate(line, 100))) + continue + } + + if event.SessionID != "" && threadID == "" { + threadID = event.SessionID + } + + if event.Content != "" { + geminiBuffer.WriteString(event.Content) + } + + infoFn(fmt.Sprintf("Parsed Gemini event #%d type=%s role=%s delta=%t status=%s content_len=%d", totalEvents, event.Type, event.Role, event.Delta, event.Status, len(event.Content))) + + default: + warnFn(fmt.Sprintf("Unknown event format: %s", truncate(line, 100))) + } + } + + if err := scanner.Err(); err != nil && !errors.Is(err, io.EOF) { + warnFn("Read stdout error: " + err.Error()) + } + + switch { + case geminiBuffer.Len() > 0: + message = geminiBuffer.String() + case claudeMessage != "": + message = claudeMessage + default: + message = codexMessage + } + + infoFn(fmt.Sprintf("parseJSONStream completed: events=%d, message_len=%d, thread_id_found=%t", totalEvents, len(message), threadID != "")) + return message, threadID +} + +func hasKey(m map[string]json.RawMessage, key string) bool { + _, ok := m[key] + return ok +} + +func 
discardInvalidJSON(decoder *json.Decoder, reader *bufio.Reader) (*bufio.Reader, error) { + var buffered bytes.Buffer + + if decoder != nil { + if buf := decoder.Buffered(); buf != nil { + _, _ = buffered.ReadFrom(buf) + } + } + + line, err := reader.ReadBytes('\n') + buffered.Write(line) + + data := buffered.Bytes() + newline := bytes.IndexByte(data, '\n') + if newline == -1 { + return reader, err + } + + remaining := data[newline+1:] + if len(remaining) == 0 { + return reader, err + } + + return bufio.NewReader(io.MultiReader(bytes.NewReader(remaining), reader)), err +} + +func normalizeText(text interface{}) string { + switch v := text.(type) { + case string: + return v + case []interface{}: + var sb strings.Builder + for _, item := range v { + if s, ok := item.(string); ok { + sb.WriteString(s) + } + } + return sb.String() + default: + return "" + } +} diff --git a/codeagent-wrapper/utils.go b/codeagent-wrapper/utils.go new file mode 100644 index 0000000..3f4fa89 --- /dev/null +++ b/codeagent-wrapper/utils.go @@ -0,0 +1,192 @@ +package main + +import ( + "bytes" + "fmt" + "io" + "os" + "strconv" + "strings" +) + +func resolveTimeout() int { + raw := os.Getenv("CODEX_TIMEOUT") + if raw == "" { + return defaultTimeout + } + + parsed, err := strconv.Atoi(raw) + if err != nil || parsed <= 0 { + logWarn(fmt.Sprintf("Invalid CODEX_TIMEOUT '%s', falling back to %ds", raw, defaultTimeout)) + return defaultTimeout + } + + if parsed > 10000 { + return parsed / 1000 + } + return parsed +} + +func readPipedTask() (string, error) { + if isTerminal() { + logInfo("Stdin is tty, skipping pipe read") + return "", nil + } + logInfo("Reading from stdin pipe...") + data, err := io.ReadAll(stdinReader) + if err != nil { + return "", fmt.Errorf("read stdin: %w", err) + } + if len(data) == 0 { + logInfo("Stdin pipe returned empty data") + return "", nil + } + logInfo(fmt.Sprintf("Read %d bytes from stdin pipe", len(data))) + return string(data), nil +} + +func shouldUseStdin(taskText 
string, piped bool) bool { + if piped { + return true + } + if len(taskText) > 800 { + return true + } + return strings.IndexAny(taskText, stdinSpecialChars) >= 0 +} + +func defaultIsTerminal() bool { + fi, err := os.Stdin.Stat() + if err != nil { + return true + } + return (fi.Mode() & os.ModeCharDevice) != 0 +} + +func isTerminal() bool { + return isTerminalFn() +} + +func getEnv(key, defaultValue string) string { + if val := os.Getenv(key); val != "" { + return val + } + return defaultValue +} + +type logWriter struct { + prefix string + maxLen int + buf bytes.Buffer +} + +func newLogWriter(prefix string, maxLen int) *logWriter { + if maxLen <= 0 { + maxLen = codexLogLineLimit + } + return &logWriter{prefix: prefix, maxLen: maxLen} +} + +func (lw *logWriter) Write(p []byte) (int, error) { + if lw == nil { + return len(p), nil + } + total := len(p) + for len(p) > 0 { + if idx := bytes.IndexByte(p, '\n'); idx >= 0 { + lw.buf.Write(p[:idx]) + lw.logLine(true) + p = p[idx+1:] + continue + } + lw.buf.Write(p) + break + } + return total, nil +} + +func (lw *logWriter) Flush() { + if lw == nil || lw.buf.Len() == 0 { + return + } + lw.logLine(false) +} + +func (lw *logWriter) logLine(force bool) { + if lw == nil { + return + } + line := lw.buf.String() + lw.buf.Reset() + if line == "" && !force { + return + } + if lw.maxLen > 0 && len(line) > lw.maxLen { + cutoff := lw.maxLen + if cutoff > 3 { + line = line[:cutoff-3] + "..." + } else { + line = line[:cutoff] + } + } + logInfo(lw.prefix + line) +} + +type tailBuffer struct { + limit int + data []byte +} + +func (b *tailBuffer) Write(p []byte) (int, error) { + if b.limit <= 0 { + return len(p), nil + } + + if len(p) >= b.limit { + b.data = append(b.data[:0], p[len(p)-b.limit:]...) + return len(p), nil + } + + total := len(b.data) + len(p) + if total <= b.limit { + b.data = append(b.data, p...) + return len(p), nil + } + + overflow := total - b.limit + b.data = append(b.data[overflow:], p...) 
+ return len(p), nil +} + +func (b *tailBuffer) String() string { + return string(b.data) +} + +func truncate(s string, maxLen int) string { + if len(s) <= maxLen { + return s + } + if maxLen < 0 { + return "" + } + return s[:maxLen] + "..." +} + +func min(a, b int) int { + if a < b { + return a + } + return b +} + +func hello() string { + return "hello world" +} + +func greet(name string) string { + return "hello " + name +} + +func farewell(name string) string { + return "goodbye " + name +} diff --git a/codex-wrapper/go.mod b/codex-wrapper/go.mod deleted file mode 100644 index 2bfa3be..0000000 --- a/codex-wrapper/go.mod +++ /dev/null @@ -1,3 +0,0 @@ -module codex-wrapper - -go 1.21 diff --git a/codex-wrapper/main.go b/codex-wrapper/main.go deleted file mode 100644 index 5edee6f..0000000 --- a/codex-wrapper/main.go +++ /dev/null @@ -1,1285 +0,0 @@ -package main - -import ( - "bufio" - "bytes" - "context" - "encoding/json" - "errors" - "fmt" - "io" - "os" - "os/exec" - "os/signal" - "sort" - "strconv" - "strings" - "sync" - "sync/atomic" - "syscall" - "time" -) - -const ( - version = "4.8.2" - defaultWorkdir = "." 
- defaultTimeout = 7200 // seconds - codexLogLineLimit = 1000 - stdinSpecialChars = "\n\\\"'`$" - stderrCaptureLimit = 4 * 1024 -) - -// Test hooks for dependency injection -var ( - stdinReader io.Reader = os.Stdin - isTerminalFn = defaultIsTerminal - codexCommand = "codex" - cleanupHook func() - loggerPtr atomic.Pointer[Logger] - - buildCodexArgsFn = buildCodexArgs - commandContext = exec.CommandContext - jsonMarshal = json.Marshal - forceKillDelay = 5 // seconds - made variable for testability -) - -// Config holds CLI configuration -type Config struct { - Mode string // "new" or "resume" - Task string - SessionID string - WorkDir string - ExplicitStdin bool - Timeout int -} - -// ParallelConfig defines the JSON schema for parallel execution -type ParallelConfig struct { - Tasks []TaskSpec `json:"tasks"` -} - -// TaskSpec describes an individual task entry in the parallel config -type TaskSpec struct { - ID string `json:"id"` - Task string `json:"task"` - WorkDir string `json:"workdir,omitempty"` - Dependencies []string `json:"dependencies,omitempty"` - SessionID string `json:"session_id,omitempty"` - Mode string `json:"-"` - UseStdin bool `json:"-"` -} - -// TaskResult captures the execution outcome of a task -type TaskResult struct { - TaskID string `json:"task_id"` - ExitCode int `json:"exit_code"` - Message string `json:"message"` - SessionID string `json:"session_id"` - Error string `json:"error"` -} - -func parseParallelConfig(data []byte) (*ParallelConfig, error) { - trimmed := bytes.TrimSpace(data) - if len(trimmed) == 0 { - return nil, fmt.Errorf("parallel config is empty") - } - - tasks := strings.Split(string(trimmed), "---TASK---") - var cfg ParallelConfig - seen := make(map[string]struct{}) - - for _, taskBlock := range tasks { - taskBlock = strings.TrimSpace(taskBlock) - if taskBlock == "" { - continue - } - - parts := strings.SplitN(taskBlock, "---CONTENT---", 2) - if len(parts) != 2 { - return nil, fmt.Errorf("task block missing ---CONTENT--- 
separator") - } - - meta := strings.TrimSpace(parts[0]) - content := strings.TrimSpace(parts[1]) - - task := TaskSpec{WorkDir: defaultWorkdir} - for _, line := range strings.Split(meta, "\n") { - line = strings.TrimSpace(line) - if line == "" { - continue - } - kv := strings.SplitN(line, ":", 2) - if len(kv) != 2 { - continue - } - key := strings.TrimSpace(kv[0]) - value := strings.TrimSpace(kv[1]) - - switch key { - case "id": - task.ID = value - case "workdir": - task.WorkDir = value - case "session_id": - task.SessionID = value - task.Mode = "resume" - case "dependencies": - for _, dep := range strings.Split(value, ",") { - dep = strings.TrimSpace(dep) - if dep != "" { - task.Dependencies = append(task.Dependencies, dep) - } - } - } - } - - if task.ID == "" { - return nil, fmt.Errorf("task missing id field") - } - if content == "" { - return nil, fmt.Errorf("task %q missing content", task.ID) - } - if _, exists := seen[task.ID]; exists { - return nil, fmt.Errorf("duplicate task id: %s", task.ID) - } - - task.Task = content - cfg.Tasks = append(cfg.Tasks, task) - seen[task.ID] = struct{}{} - } - - if len(cfg.Tasks) == 0 { - return nil, fmt.Errorf("no tasks found") - } - - return &cfg, nil -} - -func topologicalSort(tasks []TaskSpec) ([][]TaskSpec, error) { - idToTask := make(map[string]TaskSpec, len(tasks)) - indegree := make(map[string]int, len(tasks)) - adj := make(map[string][]string, len(tasks)) - - for _, task := range tasks { - idToTask[task.ID] = task - indegree[task.ID] = 0 - } - - for _, task := range tasks { - for _, dep := range task.Dependencies { - if _, ok := idToTask[dep]; !ok { - return nil, fmt.Errorf("dependency %q not found for task %q", dep, task.ID) - } - indegree[task.ID]++ - adj[dep] = append(adj[dep], task.ID) - } - } - - queue := make([]string, 0, len(tasks)) - for _, task := range tasks { - if indegree[task.ID] == 0 { - queue = append(queue, task.ID) - } - } - - layers := make([][]TaskSpec, 0) - processed := 0 - - for len(queue) > 0 { - 
current := queue - queue = nil - layer := make([]TaskSpec, len(current)) - for i, id := range current { - layer[i] = idToTask[id] - processed++ - } - layers = append(layers, layer) - - next := make([]string, 0) - for _, id := range current { - for _, neighbor := range adj[id] { - indegree[neighbor]-- - if indegree[neighbor] == 0 { - next = append(next, neighbor) - } - } - } - queue = append(queue, next...) - } - - if processed != len(tasks) { - cycleIDs := make([]string, 0) - for id, deg := range indegree { - if deg > 0 { - cycleIDs = append(cycleIDs, id) - } - } - sort.Strings(cycleIDs) - return nil, fmt.Errorf("cycle detected involving tasks: %s", strings.Join(cycleIDs, ",")) - } - - return layers, nil -} - -var runCodexTaskFn = func(task TaskSpec, timeout int) TaskResult { - if task.WorkDir == "" { - task.WorkDir = defaultWorkdir - } - if task.Mode == "" { - task.Mode = "new" - } - if task.UseStdin || shouldUseStdin(task.Task, false) { - task.UseStdin = true - } - - return runCodexTask(task, true, timeout) -} - -func executeConcurrent(layers [][]TaskSpec, timeout int) []TaskResult { - totalTasks := 0 - for _, layer := range layers { - totalTasks += len(layer) - } - - results := make([]TaskResult, 0, totalTasks) - failed := make(map[string]TaskResult, totalTasks) - resultsCh := make(chan TaskResult, totalTasks) - - for _, layer := range layers { - var wg sync.WaitGroup - executed := 0 - - for _, task := range layer { - if skip, reason := shouldSkipTask(task, failed); skip { - res := TaskResult{TaskID: task.ID, ExitCode: 1, Error: reason} - results = append(results, res) - failed[task.ID] = res - continue - } - - executed++ - wg.Add(1) - go func(ts TaskSpec) { - defer wg.Done() - defer func() { - if r := recover(); r != nil { - resultsCh <- TaskResult{TaskID: ts.ID, ExitCode: 1, Error: fmt.Sprintf("panic: %v", r)} - } - }() - resultsCh <- runCodexTaskFn(ts, timeout) - }(task) - } - - wg.Wait() - - for i := 0; i < executed; i++ { - res := <-resultsCh - results = 
append(results, res) - if res.ExitCode != 0 || res.Error != "" { - failed[res.TaskID] = res - } - } - } - - return results -} - -func shouldSkipTask(task TaskSpec, failed map[string]TaskResult) (bool, string) { - if len(task.Dependencies) == 0 { - return false, "" - } - - var blocked []string - for _, dep := range task.Dependencies { - if _, ok := failed[dep]; ok { - blocked = append(blocked, dep) - } - } - - if len(blocked) == 0 { - return false, "" - } - - return true, fmt.Sprintf("skipped due to failed dependencies: %s", strings.Join(blocked, ",")) -} - -func generateFinalOutput(results []TaskResult) string { - var sb strings.Builder - - success := 0 - failed := 0 - for _, res := range results { - if res.ExitCode == 0 && res.Error == "" { - success++ - } else { - failed++ - } - } - - sb.WriteString(fmt.Sprintf("=== Parallel Execution Summary ===\n")) - sb.WriteString(fmt.Sprintf("Total: %d | Success: %d | Failed: %d\n\n", len(results), success, failed)) - - for _, res := range results { - sb.WriteString(fmt.Sprintf("--- Task: %s ---\n", res.TaskID)) - if res.Error != "" { - sb.WriteString(fmt.Sprintf("Status: FAILED (exit code %d)\nError: %s\n", res.ExitCode, res.Error)) - } else if res.ExitCode != 0 { - sb.WriteString(fmt.Sprintf("Status: FAILED (exit code %d)\n", res.ExitCode)) - } else { - sb.WriteString("Status: SUCCESS\n") - } - if res.SessionID != "" { - sb.WriteString(fmt.Sprintf("Session: %s\n", res.SessionID)) - } - if res.Message != "" { - sb.WriteString(fmt.Sprintf("\n%s\n", res.Message)) - } - sb.WriteString("\n") - } - - return sb.String() -} - -// JSONEvent represents a Codex JSON output event -type JSONEvent struct { - Type string `json:"type"` - ThreadID string `json:"thread_id,omitempty"` - Item *EventItem `json:"item,omitempty"` -} - -// EventItem represents the item field in a JSON event -type EventItem struct { - Type string `json:"type"` - Text interface{} `json:"text"` -} - -func main() { - exitCode := run() - os.Exit(exitCode) -} - -// run 
is the main logic, returns exit code for testability -func run() (exitCode int) { - // Handle --version and --help first (no logger needed) - if len(os.Args) > 1 { - switch os.Args[1] { - case "--version", "-v": - fmt.Printf("codex-wrapper version %s\n", version) - return 0 - case "--help", "-h": - printHelp() - return 0 - } - } - - // Initialize logger for all other commands - logger, err := NewLogger() - if err != nil { - fmt.Fprintf(os.Stderr, "ERROR: failed to initialize logger: %v\n", err) - return 1 - } - setLogger(logger) - - defer func() { - logger := activeLogger() - if logger != nil { - logger.Flush() - } - if err := closeLogger(); err != nil { - fmt.Fprintf(os.Stderr, "ERROR: failed to close logger: %v\n", err) - } - // Always remove log file after completion - if logger != nil { - if err := logger.RemoveLogFile(); err != nil && !os.IsNotExist(err) { - // Silently ignore removal errors - } - } - }() - defer runCleanupHook() - - // Handle remaining commands - if len(os.Args) > 1 { - switch os.Args[1] { - case "--parallel": - if len(os.Args) > 2 { - fmt.Fprintln(os.Stderr, "ERROR: --parallel reads its task configuration from stdin and does not accept additional arguments.") - fmt.Fprintln(os.Stderr, "Usage examples:") - fmt.Fprintln(os.Stderr, " codex-wrapper --parallel < tasks.txt") - fmt.Fprintln(os.Stderr, " echo '...' 
| codex-wrapper --parallel") - fmt.Fprintln(os.Stderr, " codex-wrapper --parallel <<'EOF'") - return 1 - } - data, err := io.ReadAll(stdinReader) - if err != nil { - fmt.Fprintf(os.Stderr, "ERROR: failed to read stdin: %v\n", err) - return 1 - } - - cfg, err := parseParallelConfig(data) - if err != nil { - fmt.Fprintf(os.Stderr, "ERROR: %v\n", err) - return 1 - } - - timeoutSec := resolveTimeout() - layers, err := topologicalSort(cfg.Tasks) - if err != nil { - fmt.Fprintf(os.Stderr, "ERROR: %v\n", err) - return 1 - } - - results := executeConcurrent(layers, timeoutSec) - fmt.Println(generateFinalOutput(results)) - - exitCode = 0 - for _, res := range results { - if res.ExitCode != 0 { - exitCode = res.ExitCode - } - } - - return exitCode - } - } - - logInfo("Script started") - - cfg, err := parseArgs() - if err != nil { - logError(err.Error()) - return 1 - } - logInfo(fmt.Sprintf("Parsed args: mode=%s, task_len=%d", cfg.Mode, len(cfg.Task))) - - timeoutSec := resolveTimeout() - logInfo(fmt.Sprintf("Timeout: %ds", timeoutSec)) - cfg.Timeout = timeoutSec - - var taskText string - var piped bool - - if cfg.ExplicitStdin { - logInfo("Explicit stdin mode: reading task from stdin") - data, err := io.ReadAll(stdinReader) - if err != nil { - logError("Failed to read stdin: " + err.Error()) - return 1 - } - taskText = string(data) - if taskText == "" { - logError("Explicit stdin mode requires task input from stdin") - return 1 - } - piped = !isTerminal() - } else { - pipedTask, err := readPipedTask() - if err != nil { - logError("Failed to read piped stdin: " + err.Error()) - return 1 - } - piped = pipedTask != "" - if piped { - taskText = pipedTask - } else { - taskText = cfg.Task - } - } - - useStdin := cfg.ExplicitStdin || shouldUseStdin(taskText, piped) - - targetArg := taskText - if useStdin { - targetArg = "-" - } - codexArgs := buildCodexArgsFn(cfg, targetArg) - - // Print startup information to stderr - fmt.Fprintf(os.Stderr, "[codex-wrapper]\n") - 
fmt.Fprintf(os.Stderr, " Command: %s %s\n", codexCommand, strings.Join(codexArgs, " ")) - fmt.Fprintf(os.Stderr, " PID: %d\n", os.Getpid()) - fmt.Fprintf(os.Stderr, " Log: %s\n", logger.Path()) - - if useStdin { - var reasons []string - if piped { - reasons = append(reasons, "piped input") - } - if cfg.ExplicitStdin { - reasons = append(reasons, "explicit \"-\"") - } - if strings.Contains(taskText, "\n") { - reasons = append(reasons, "newline") - } - if strings.Contains(taskText, "\\") { - reasons = append(reasons, "backslash") - } - if strings.Contains(taskText, "\"") { - reasons = append(reasons, "double-quote") - } - if strings.Contains(taskText, "'") { - reasons = append(reasons, "single-quote") - } - if strings.Contains(taskText, "`") { - reasons = append(reasons, "backtick") - } - if strings.Contains(taskText, "$") { - reasons = append(reasons, "dollar") - } - if len(taskText) > 800 { - reasons = append(reasons, "length>800") - } - if len(reasons) > 0 { - logWarn(fmt.Sprintf("Using stdin mode for task due to: %s", strings.Join(reasons, ", "))) - } - } - - logInfo("codex running...") - - taskSpec := TaskSpec{ - Task: taskText, - WorkDir: cfg.WorkDir, - Mode: cfg.Mode, - SessionID: cfg.SessionID, - UseStdin: useStdin, - } - - result := runCodexTask(taskSpec, false, cfg.Timeout) - - if result.ExitCode != 0 { - return result.ExitCode - } - - fmt.Println(result.Message) - if result.SessionID != "" { - fmt.Printf("\n---\nSESSION_ID: %s\n", result.SessionID) - } - - return 0 -} - -func parseArgs() (*Config, error) { - args := os.Args[1:] - if len(args) == 0 { - return nil, fmt.Errorf("task required") - } - - cfg := &Config{WorkDir: defaultWorkdir} - - if args[0] == "resume" { - if len(args) < 3 { - return nil, fmt.Errorf("resume mode requires: resume ") - } - cfg.Mode = "resume" - cfg.SessionID = args[1] - cfg.Task = args[2] - cfg.ExplicitStdin = (args[2] == "-") - if len(args) > 3 { - cfg.WorkDir = args[3] - } - } else { - cfg.Mode = "new" - cfg.Task = args[0] - 
cfg.ExplicitStdin = (args[0] == "-") - if len(args) > 1 { - cfg.WorkDir = args[1] - } - } - - return cfg, nil -} - -func readPipedTask() (string, error) { - if isTerminal() { - logInfo("Stdin is tty, skipping pipe read") - return "", nil - } - logInfo("Reading from stdin pipe...") - data, err := io.ReadAll(stdinReader) - if err != nil { - return "", fmt.Errorf("read stdin: %w", err) - } - if len(data) == 0 { - logInfo("Stdin pipe returned empty data") - return "", nil - } - logInfo(fmt.Sprintf("Read %d bytes from stdin pipe", len(data))) - return string(data), nil -} - -func shouldUseStdin(taskText string, piped bool) bool { - if piped { - return true - } - if len(taskText) > 800 { - return true - } - return strings.IndexAny(taskText, stdinSpecialChars) >= 0 -} - -func buildCodexArgs(cfg *Config, targetArg string) []string { - if cfg.Mode == "resume" { - return []string{ - "e", - "--skip-git-repo-check", - "--json", - "resume", - cfg.SessionID, - targetArg, - } - } - return []string{ - "e", - "--skip-git-repo-check", - "-C", cfg.WorkDir, - "--json", - targetArg, - } -} - -type parseResult struct { - message string - threadID string -} - -func runCodexTask(taskSpec TaskSpec, silent bool, timeoutSec int) TaskResult { - return runCodexTaskWithContext(context.Background(), taskSpec, nil, false, silent, timeoutSec) -} - -func runCodexProcess(parentCtx context.Context, codexArgs []string, taskText string, useStdin bool, timeoutSec int) (message, threadID string, exitCode int) { - res := runCodexTaskWithContext(parentCtx, TaskSpec{Task: taskText, WorkDir: defaultWorkdir, Mode: "new", UseStdin: useStdin}, codexArgs, true, false, timeoutSec) - return res.Message, res.SessionID, res.ExitCode -} - -func runCodexTaskWithContext(parentCtx context.Context, taskSpec TaskSpec, customArgs []string, useCustomArgs bool, silent bool, timeoutSec int) TaskResult { - result := TaskResult{TaskID: taskSpec.ID} - - cfg := &Config{ - Mode: taskSpec.Mode, - Task: taskSpec.Task, - SessionID: 
taskSpec.SessionID, - WorkDir: taskSpec.WorkDir, - } - if cfg.Mode == "" { - cfg.Mode = "new" - } - if cfg.WorkDir == "" { - cfg.WorkDir = defaultWorkdir - } - - useStdin := taskSpec.UseStdin - targetArg := taskSpec.Task - if useStdin { - targetArg = "-" - } - - var codexArgs []string - if useCustomArgs { - codexArgs = customArgs - } else { - codexArgs = buildCodexArgsFn(cfg, targetArg) - } - - prefixMsg := func(msg string) string { - if taskSpec.ID == "" { - return msg - } - return fmt.Sprintf("[Task: %s] %s", taskSpec.ID, msg) - } - - var logInfoFn func(string) - var logWarnFn func(string) - var logErrorFn func(string) - - if silent { - // Silent mode: only persist to file when available; avoid stderr noise. - logInfoFn = func(msg string) { - if logger := activeLogger(); logger != nil { - logger.Info(prefixMsg(msg)) - } - } - logWarnFn = func(msg string) { - if logger := activeLogger(); logger != nil { - logger.Warn(prefixMsg(msg)) - } - } - logErrorFn = func(msg string) { - if logger := activeLogger(); logger != nil { - logger.Error(prefixMsg(msg)) - } - } - } else { - logInfoFn = func(msg string) { logInfo(prefixMsg(msg)) } - logWarnFn = func(msg string) { logWarn(prefixMsg(msg)) } - logErrorFn = func(msg string) { logError(prefixMsg(msg)) } - } - - stderrBuf := &tailBuffer{limit: stderrCaptureLimit} - - var stdoutLogger *logWriter - var stderrLogger *logWriter - - var tempLogger *Logger - if silent && activeLogger() == nil { - if l, err := NewLogger(); err == nil { - setLogger(l) - tempLogger = l - } - } - defer func() { - if tempLogger != nil { - closeLogger() - } - }() - - if !silent { - stdoutLogger = newLogWriter("CODEX_STDOUT: ", codexLogLineLimit) - stderrLogger = newLogWriter("CODEX_STDERR: ", codexLogLineLimit) - } - - ctx := parentCtx - if ctx == nil { - ctx = context.Background() - } - - ctx, cancel := context.WithTimeout(ctx, time.Duration(timeoutSec)*time.Second) - defer cancel() - ctx, stop := signal.NotifyContext(ctx, syscall.SIGINT, 
syscall.SIGTERM) - defer stop() - - attachStderr := func(msg string) string { - return fmt.Sprintf("%s; stderr: %s", msg, stderrBuf.String()) - } - - cmd := commandContext(ctx, codexCommand, codexArgs...) - - stderrWriters := []io.Writer{stderrBuf} - if stderrLogger != nil { - stderrWriters = append(stderrWriters, stderrLogger) - } - if !silent { - stderrWriters = append([]io.Writer{os.Stderr}, stderrWriters...) - } - if len(stderrWriters) == 1 { - cmd.Stderr = stderrWriters[0] - } else { - cmd.Stderr = io.MultiWriter(stderrWriters...) - } - - var stdinPipe io.WriteCloser - var err error - if useStdin { - stdinPipe, err = cmd.StdinPipe() - if err != nil { - logErrorFn("Failed to create stdin pipe: " + err.Error()) - result.ExitCode = 1 - result.Error = attachStderr("failed to create stdin pipe: " + err.Error()) - return result - } - } - - stdout, err := cmd.StdoutPipe() - if err != nil { - logErrorFn("Failed to create stdout pipe: " + err.Error()) - result.ExitCode = 1 - result.Error = attachStderr("failed to create stdout pipe: " + err.Error()) - return result - } - - stdoutReader := io.Reader(stdout) - if stdoutLogger != nil { - stdoutReader = io.TeeReader(stdout, stdoutLogger) - } - - logInfoFn(fmt.Sprintf("Starting codex with args: codex %s...", strings.Join(codexArgs[:min(5, len(codexArgs))], " "))) - - if err := cmd.Start(); err != nil { - if strings.Contains(err.Error(), "executable file not found") { - logErrorFn("codex command not found in PATH") - result.ExitCode = 127 - result.Error = attachStderr("codex command not found in PATH") - return result - } - logErrorFn("Failed to start codex: " + err.Error()) - result.ExitCode = 1 - result.Error = attachStderr("failed to start codex: " + err.Error()) - return result - } - - logInfoFn(fmt.Sprintf("Starting codex with PID: %d", cmd.Process.Pid)) - if logger := activeLogger(); logger != nil { - logInfoFn(fmt.Sprintf("Log capturing to: %s", logger.Path())) - } - - if useStdin && stdinPipe != nil { - 
logInfoFn(fmt.Sprintf("Writing %d chars to stdin...", len(taskSpec.Task))) - go func(data string) { - defer stdinPipe.Close() - _, _ = io.WriteString(stdinPipe, data) - }(taskSpec.Task) - logInfoFn("Stdin closed") - } - - waitCh := make(chan error, 1) - go func() { waitCh <- cmd.Wait() }() - - parseCh := make(chan parseResult, 1) - go func() { - msg, tid := parseJSONStreamWithLog(stdoutReader, logWarnFn, logInfoFn) - parseCh <- parseResult{message: msg, threadID: tid} - }() - - var waitErr error - var forceKillTimer *time.Timer - - select { - case waitErr = <-waitCh: - case <-ctx.Done(): - logErrorFn(cancelReason(ctx)) - forceKillTimer = terminateProcess(cmd) - waitErr = <-waitCh - } - - if forceKillTimer != nil { - forceKillTimer.Stop() - } - - parsed := <-parseCh - - if ctxErr := ctx.Err(); ctxErr != nil { - if errors.Is(ctxErr, context.DeadlineExceeded) { - result.ExitCode = 124 - result.Error = attachStderr("codex execution timeout") - return result - } - result.ExitCode = 130 - result.Error = attachStderr("execution cancelled") - return result - } - - if waitErr != nil { - if exitErr, ok := waitErr.(*exec.ExitError); ok { - code := exitErr.ExitCode() - logErrorFn(fmt.Sprintf("Codex exited with status %d", code)) - result.ExitCode = code - result.Error = attachStderr(fmt.Sprintf("codex exited with status %d", code)) - return result - } - logErrorFn("Codex error: " + waitErr.Error()) - result.ExitCode = 1 - result.Error = attachStderr("codex error: " + waitErr.Error()) - return result - } - - message := parsed.message - threadID := parsed.threadID - if message == "" { - logErrorFn("Codex completed without agent_message output") - result.ExitCode = 1 - result.Error = attachStderr("codex completed without agent_message output") - return result - } - - if stdoutLogger != nil { - stdoutLogger.Flush() - } - if stderrLogger != nil { - stderrLogger.Flush() - } - - result.ExitCode = 0 - result.Message = message - result.SessionID = threadID - - return result -} - -type 
tailBuffer struct { - limit int - data []byte -} - -func (b *tailBuffer) Write(p []byte) (int, error) { - if b.limit <= 0 { - return len(p), nil - } - - if len(p) >= b.limit { - b.data = append(b.data[:0], p[len(p)-b.limit:]...) - return len(p), nil - } - - total := len(b.data) + len(p) - if total <= b.limit { - b.data = append(b.data, p...) - return len(p), nil - } - - overflow := total - b.limit - b.data = append(b.data[overflow:], p...) - return len(p), nil -} - -func (b *tailBuffer) String() string { - return string(b.data) -} - -func forwardSignals(ctx context.Context, cmd *exec.Cmd, logErrorFn func(string)) { - sigCh := make(chan os.Signal, 1) - signal.Notify(sigCh, syscall.SIGINT, syscall.SIGTERM) - - go func() { - defer signal.Stop(sigCh) - select { - case sig := <-sigCh: - logErrorFn(fmt.Sprintf("Received signal: %v", sig)) - if cmd.Process != nil { - cmd.Process.Signal(syscall.SIGTERM) - time.AfterFunc(time.Duration(forceKillDelay)*time.Second, func() { - if cmd.Process != nil { - cmd.Process.Kill() - } - }) - } - case <-ctx.Done(): - } - }() -} - -func cancelReason(ctx context.Context) string { - if ctx == nil { - return "Context cancelled" - } - - if errors.Is(ctx.Err(), context.DeadlineExceeded) { - return "Codex execution timeout" - } - - return "Execution cancelled, terminating codex process" -} - -func terminateProcess(cmd *exec.Cmd) *time.Timer { - if cmd == nil || cmd.Process == nil { - return nil - } - - _ = cmd.Process.Signal(syscall.SIGTERM) - - return time.AfterFunc(time.Duration(forceKillDelay)*time.Second, func() { - if cmd.Process != nil { - _ = cmd.Process.Kill() - } - }) -} - -func parseJSONStream(r io.Reader) (message, threadID string) { - return parseJSONStreamWithLog(r, logWarn, logInfo) -} - -func parseJSONStreamWithWarn(r io.Reader, warnFn func(string)) (message, threadID string) { - return parseJSONStreamWithLog(r, warnFn, logInfo) -} - -func parseJSONStreamWithLog(r io.Reader, warnFn func(string), infoFn func(string)) (message, 
threadID string) { - scanner := bufio.NewScanner(r) - scanner.Buffer(make([]byte, 64*1024), 10*1024*1024) - - if warnFn == nil { - warnFn = func(string) {} - } - if infoFn == nil { - infoFn = func(string) {} - } - - totalEvents := 0 - - for scanner.Scan() { - line := strings.TrimSpace(scanner.Text()) - if line == "" { - continue - } - totalEvents++ - - var event JSONEvent - if err := json.Unmarshal([]byte(line), &event); err != nil { - warnFn(fmt.Sprintf("Failed to parse line: %s", truncate(line, 100))) - continue - } - - var details []string - if event.ThreadID != "" { - details = append(details, fmt.Sprintf("thread_id=%s", event.ThreadID)) - } - if event.Item != nil && event.Item.Type != "" { - details = append(details, fmt.Sprintf("item_type=%s", event.Item.Type)) - } - if len(details) > 0 { - infoFn(fmt.Sprintf("Parsed event #%d type=%s (%s)", totalEvents, event.Type, strings.Join(details, ", "))) - } else { - infoFn(fmt.Sprintf("Parsed event #%d type=%s", totalEvents, event.Type)) - } - - switch event.Type { - case "thread.started": - threadID = event.ThreadID - infoFn(fmt.Sprintf("thread.started event thread_id=%s", threadID)) - case "item.completed": - var itemType string - var normalized string - if event.Item != nil { - itemType = event.Item.Type - normalized = normalizeText(event.Item.Text) - } - infoFn(fmt.Sprintf("item.completed event item_type=%s message_len=%d", itemType, len(normalized))) - if event.Item != nil && event.Item.Type == "agent_message" && normalized != "" { - message = normalized - } - } - } - - if err := scanner.Err(); err != nil && !errors.Is(err, io.EOF) { - warnFn("Read stdout error: " + err.Error()) - } - - infoFn(fmt.Sprintf("parseJSONStream completed: events=%d, message_len=%d, thread_id_found=%t", totalEvents, len(message), threadID != "")) - return message, threadID -} - -func discardInvalidJSON(decoder *json.Decoder, reader *bufio.Reader) (*bufio.Reader, error) { - var buffered bytes.Buffer - - if decoder != nil { - if buf := 
decoder.Buffered(); buf != nil { - _, _ = buffered.ReadFrom(buf) - } - } - - line, err := reader.ReadBytes('\n') - buffered.Write(line) - - data := buffered.Bytes() - newline := bytes.IndexByte(data, '\n') - if newline == -1 { - return reader, err - } - - remaining := data[newline+1:] - if len(remaining) == 0 { - return reader, err - } - - return bufio.NewReader(io.MultiReader(bytes.NewReader(remaining), reader)), err -} - -func normalizeText(text interface{}) string { - switch v := text.(type) { - case string: - return v - case []interface{}: - var sb strings.Builder - for _, item := range v { - if s, ok := item.(string); ok { - sb.WriteString(s) - } - } - return sb.String() - default: - return "" - } -} - -func resolveTimeout() int { - raw := os.Getenv("CODEX_TIMEOUT") - if raw == "" { - return defaultTimeout - } - - parsed, err := strconv.Atoi(raw) - if err != nil || parsed <= 0 { - logWarn(fmt.Sprintf("Invalid CODEX_TIMEOUT '%s', falling back to %ds", raw, defaultTimeout)) - return defaultTimeout - } - - if parsed > 10000 { - return parsed / 1000 - } - return parsed -} - -func defaultIsTerminal() bool { - fi, err := os.Stdin.Stat() - if err != nil { - return true - } - return (fi.Mode() & os.ModeCharDevice) != 0 -} - -func isTerminal() bool { - return isTerminalFn() -} - -func getEnv(key, defaultValue string) string { - if val := os.Getenv(key); val != "" { - return val - } - return defaultValue -} - -type logWriter struct { - prefix string - maxLen int - buf bytes.Buffer -} - -func newLogWriter(prefix string, maxLen int) *logWriter { - if maxLen <= 0 { - maxLen = codexLogLineLimit - } - return &logWriter{prefix: prefix, maxLen: maxLen} -} - -func (lw *logWriter) Write(p []byte) (int, error) { - if lw == nil { - return len(p), nil - } - total := len(p) - for len(p) > 0 { - if idx := bytes.IndexByte(p, '\n'); idx >= 0 { - lw.buf.Write(p[:idx]) - lw.logLine(true) - p = p[idx+1:] - continue - } - lw.buf.Write(p) - break - } - return total, nil -} - -func (lw 
*logWriter) Flush() { - if lw == nil || lw.buf.Len() == 0 { - return - } - lw.logLine(false) -} - -func (lw *logWriter) logLine(force bool) { - if lw == nil { - return - } - line := lw.buf.String() - lw.buf.Reset() - if line == "" && !force { - return - } - if lw.maxLen > 0 && len(line) > lw.maxLen { - cutoff := lw.maxLen - if cutoff > 3 { - line = line[:cutoff-3] + "..." - } else { - line = line[:cutoff] - } - } - logInfo(lw.prefix + line) -} - -func truncate(s string, maxLen int) string { - if len(s) <= maxLen { - return s - } - if maxLen < 0 { - return "" - } - return s[:maxLen] + "..." -} - -func min(a, b int) int { - if a < b { - return a - } - return b -} - -func setLogger(l *Logger) { - loggerPtr.Store(l) -} - -func closeLogger() error { - logger := loggerPtr.Swap(nil) - if logger == nil { - return nil - } - return logger.Close() -} - -func activeLogger() *Logger { - return loggerPtr.Load() -} - -func hello() string { - return "hello world" -} - -func greet(name string) string { - return "hello " + name -} - -func farewell(name string) string { - return "goodbye " + name -} - -func logInfo(msg string) { - if logger := activeLogger(); logger != nil { - logger.Info(msg) - } -} - -func logWarn(msg string) { - if logger := activeLogger(); logger != nil { - logger.Warn(msg) - } -} - -func logError(msg string) { - if logger := activeLogger(); logger != nil { - logger.Error(msg) - } -} - -func runCleanupHook() { - if logger := activeLogger(); logger != nil { - logger.Flush() - } - if cleanupHook != nil { - cleanupHook() - } -} - -func printHelp() { - help := `codex-wrapper - Go wrapper for Codex CLI - -Usage: - codex-wrapper "task" [workdir] - codex-wrapper - [workdir] Read task from stdin - codex-wrapper resume "task" [workdir] - codex-wrapper resume - [workdir] - codex-wrapper --parallel Run tasks in parallel (config from stdin) - codex-wrapper --version - codex-wrapper --help - -Parallel mode examples: - codex-wrapper --parallel < tasks.txt - echo '...' 
| codex-wrapper --parallel - codex-wrapper --parallel <<'EOF' - -Environment Variables: - CODEX_TIMEOUT Timeout in milliseconds (default: 7200000) - -Exit Codes: - 0 Success - 1 General error (missing args, no output) - 124 Timeout - 127 codex command not found - 130 Interrupted (Ctrl+C) - * Passthrough from codex process` - fmt.Println(help) -} diff --git a/config.json b/config.json index dddb09b..056deb2 100644 --- a/config.json +++ b/config.json @@ -27,7 +27,7 @@ { "type": "run_command", "command": "bash install.sh", - "description": "Install codex-wrapper binary", + "description": "Install codeagent-wrapper binary", "env": { "INSTALL_DIR": "${install_dir}" } @@ -84,6 +84,36 @@ "description": "Copy development commands documentation" } ] + }, + "gh": { + "enabled": true, + "description": "GitHub issue-to-PR workflow with codeagent integration", + "operations": [ + { + "type": "merge_dir", + "source": "github-workflow", + "description": "Merge GitHub workflow commands" + }, + { + "type": "copy_file", + "source": "skills/codeagent/SKILL.md", + "target": "skills/codeagent/SKILL.md", + "description": "Install codeagent skill" + }, + { + "type": "copy_dir", + "source": "hooks", + "target": "hooks", + "description": "Copy hooks scripts" + }, + { + "type": "merge_json", + "source": "hooks/hooks-config.json", + "target": "settings.json", + "merge_key": "hooks", + "description": "Merge hooks configuration into settings.json" + } + ] } } } diff --git a/config.schema.json b/config.schema.json index 82d2516..ae7c750 100644 --- a/config.schema.json +++ b/config.schema.json @@ -49,6 +49,7 @@ { "$ref": "#/$defs/op_copy_dir" }, { "$ref": "#/$defs/op_copy_file" }, { "$ref": "#/$defs/op_merge_dir" }, + { "$ref": "#/$defs/op_merge_json" }, { "$ref": "#/$defs/op_run_command" } ] }, @@ -91,6 +92,18 @@ "description": { "type": "string" } } }, + "op_merge_json": { + "type": "object", + "additionalProperties": false, + "required": ["type", "source", "target"], + "properties": { + 
"type": { "const": "merge_json" }, + "source": { "type": "string", "minLength": 1 }, + "target": { "type": "string", "minLength": 1 }, + "merge_key": { "type": "string" }, + "description": { "type": "string" } + } + }, "op_run_command": { "type": "object", "additionalProperties": false, diff --git a/docs/CODEAGENT-WRAPPER.md b/docs/CODEAGENT-WRAPPER.md new file mode 100644 index 0000000..e8dc398 --- /dev/null +++ b/docs/CODEAGENT-WRAPPER.md @@ -0,0 +1,407 @@ +# Codeagent-Wrapper User Guide + +Multi-backend AI code execution wrapper supporting Codex, Claude, and Gemini. + +## Overview + +`codeagent-wrapper` is a Go-based CLI tool that provides a unified interface to multiple AI coding backends. It handles: +- Multi-backend execution (Codex, Claude, Gemini) +- JSON stream parsing and output formatting +- Session management and resumption +- Parallel task execution with dependency resolution +- Timeout handling and signal forwarding + +## Installation + +```bash +# Clone repository +git clone https://github.com/cexll/myclaude.git +cd myclaude + +# Install via install.py (includes binary compilation) +python3 install.py --module dev + +# Or manual installation +cd codeagent-wrapper +go build -o ~/.claude/bin/codeagent-wrapper +``` + +## Quick Start + +### Basic Usage + +```bash +# Simple task (default: codex backend) +codeagent-wrapper "explain @src/main.go" + +# With backend selection +codeagent-wrapper --backend claude "refactor @utils.ts" + +# With HEREDOC (recommended for complex tasks) +codeagent-wrapper --backend gemini - <<'EOF' +Implement user authentication: +- JWT tokens +- Password hashing with bcrypt +- Session management +EOF +``` + +### Backend Selection + +| Backend | Command | Best For | +|---------|---------|----------| +| **Codex** | `--backend codex` | General code tasks (default) | +| **Claude** | `--backend claude` | Complex reasoning, architecture | +| **Gemini** | `--backend gemini` | Fast iteration, prototyping | + +## Core Features + +### 1. 
Multi-Backend Support + +```bash +# Codex (default) +codeagent-wrapper "add logging to @app.js" + +# Claude for architecture decisions +codeagent-wrapper --backend claude - <<'EOF' +Design a microservices architecture for e-commerce: +- Service boundaries +- Communication patterns +- Data consistency strategy +EOF + +# Gemini for quick prototypes +codeagent-wrapper --backend gemini "create React component for user profile" +``` + +### 2. File References with @ Syntax + +```bash +# Single file +codeagent-wrapper "optimize @src/utils.ts" + +# Multiple files +codeagent-wrapper "refactor @src/auth.ts and @src/middleware.ts" + +# Entire directory +codeagent-wrapper "analyze @src for security issues" +``` + +### 3. Session Management + +```bash +# First task +codeagent-wrapper "add validation to user form" +# Output includes: SESSION_ID: 019a7247-ac9d-71f3-89e2-a823dbd8fd14 + +# Resume session +codeagent-wrapper resume 019a7247-ac9d-71f3-89e2-a823dbd8fd14 - <<'EOF' +Now add error messages for each validation rule +EOF +``` + +### 4. 
Parallel Execution + +Execute multiple tasks concurrently with dependency management: + +```bash +codeagent-wrapper --parallel <<'EOF' +---TASK--- +id: backend_1701234567 +workdir: /project/backend +---CONTENT--- +implement /api/users endpoints with CRUD operations + +---TASK--- +id: frontend_1701234568 +workdir: /project/frontend +---CONTENT--- +build Users page consuming /api/users + +---TASK--- +id: tests_1701234569 +workdir: /project/tests +dependencies: backend_1701234567, frontend_1701234568 +---CONTENT--- +add integration tests for user management flow +EOF +``` + +**Parallel Task Format:** +- `---TASK---` - Starts task block +- `id: ` - Required, use `_` format +- `workdir: ` - Optional, defaults to current directory +- `dependencies: , ` - Optional, comma-separated task IDs +- `---CONTENT---` - Separates metadata from task content + +**Features:** +- Automatic topological sorting +- Unlimited concurrency for independent tasks +- Error isolation (failures don't stop other tasks) +- Dependency blocking (skip if parent fails) + +### 5. 
Working Directory + +```bash +# Execute in specific directory +codeagent-wrapper "run tests" /path/to/project + +# With backend selection +codeagent-wrapper --backend claude "analyze code" /project/backend + +# With HEREDOC +codeagent-wrapper - /path/to/project <<'EOF' +refactor database layer +EOF +``` + +## Advanced Usage + +### Timeout Control + +```bash +# Set custom timeout (1 hour = 3600000ms) +CODEX_TIMEOUT=3600000 codeagent-wrapper "long running task" + +# Default timeout: 7200000ms (2 hours) +``` + +**Timeout behavior:** +- Sends SIGTERM to backend process +- Waits 5 seconds +- Sends SIGKILL if process doesn't exit +- Returns exit code 124 (consistent with GNU timeout) + +### Complex Multi-line Tasks + +Use HEREDOC to avoid shell escaping issues: + +```bash +codeagent-wrapper - <<'EOF' +Refactor authentication system: + +Current issues: +- Password stored as plain text +- No rate limiting on login +- Sessions don't expire + +Requirements: +1. Hash passwords with bcrypt +2. Add rate limiting (5 attempts/15min) +3. Session expiry after 24h +4. Add refresh token mechanism + +Files to modify: +- @src/auth/login.ts +- @src/middleware/rateLimit.ts +- @config/session.ts +EOF +``` + +### Backend-Specific Features + +**Codex:** +```bash +# Best for code editing and refactoring +codeagent-wrapper --backend codex - <<'EOF' +extract duplicate code in @src into reusable helpers +EOF +``` + +**Claude:** +```bash +# Best for complex reasoning +codeagent-wrapper --backend claude - <<'EOF' +review @src/payment/processor.ts for: +- Race conditions +- Edge cases +- Security vulnerabilities +EOF +``` + +**Gemini:** +```bash +# Best for fast iteration +codeagent-wrapper --backend gemini "add TypeScript types to @api.js" +``` + +## Output Format + +Standard output includes parsed agent messages and session ID: + +``` +Agent response text here... +Implementation details... 
+ +--- +SESSION_ID: 019a7247-ac9d-71f3-89e2-a823dbd8fd14 +``` + +Error output (stderr): +``` +ERROR: Error message details +``` + +Parallel execution output: +``` +=== Parallel Execution Summary === +Total: 3 | Success: 2 | Failed: 1 + +--- Task: backend_1701234567 --- +Status: SUCCESS +Session: 019a7247-ac9d-71f3-89e2-a823dbd8fd14 + +Implementation complete... + +--- Task: frontend_1701234568 --- +Status: SUCCESS +Session: 019a7248-ac9d-71f3-89e2-a823dbd8fd14 + +UI components created... + +--- Task: tests_1701234569 --- +Status: FAILED (exit code 1) +Error: dependency backend_1701234567 failed +``` + +## Exit Codes + +| Code | Meaning | +|------|---------| +| 0 | Success | +| 1 | General error (missing args, no output) | +| 124 | Timeout | +| 127 | Backend command not found | +| 130 | Interrupted (Ctrl+C) | +| * | Passthrough from backend process | + +## Environment Variables + +| Variable | Default | Description | +|----------|---------|-------------| +| `CODEX_TIMEOUT` | 7200000 | Timeout in milliseconds | + +## Troubleshooting + +**Backend not found:** +```bash +# Ensure backend CLI is installed +which codex +which claude +which gemini + +# Check PATH +echo $PATH +``` + +**Timeout too short:** +```bash +# Increase timeout to 4 hours +CODEX_TIMEOUT=14400000 codeagent-wrapper "complex task" +``` + +**Session ID not found:** +```bash +# List recent sessions (backend-specific) +codex history + +# Ensure session ID is copied correctly +codeagent-wrapper resume "continue task" +``` + +**Parallel tasks not running:** +```bash +# Check task format +# Ensure ---TASK--- and ---CONTENT--- delimiters are correct +# Verify task IDs are unique +# Check dependencies reference existing task IDs +``` + +## Integration with Claude Code + +Use via the `codeagent` skill: + +```bash +# In Claude Code conversation +User: Use codeagent to implement authentication + +# Claude will execute: +codeagent-wrapper --backend codex - <<'EOF' +implement JWT authentication in @src/auth +EOF 
+``` + +## Performance Tips + +1. **Use parallel execution** for independent tasks +2. **Choose the right backend** for the task type +3. **Keep working directory specific** to reduce context +4. **Resume sessions** for multi-step workflows +5. **Use @ syntax** to minimize file content in prompts + +## Best Practices + +1. **HEREDOC for complex tasks** - Avoid shell escaping nightmares +2. **Descriptive task IDs** - Use `_` format +3. **Absolute paths** - Avoid relative path confusion +4. **Session resumption** - Continue conversations with context +5. **Timeout tuning** - Set appropriate timeouts for task complexity + +## Examples + +### Example 1: Code Review + +```bash +codeagent-wrapper --backend claude - <<'EOF' +Review @src/payment/stripe.ts for: +1. Security issues (API key handling, input validation) +2. Error handling (network failures, API errors) +3. Edge cases (duplicate charges, partial refunds) +4. Code quality (naming, structure, comments) +EOF +``` + +### Example 2: Refactoring + +```bash +codeagent-wrapper --backend codex - <<'EOF' +Refactor @src/utils: +- Extract duplicate code into helpers +- Add TypeScript types +- Improve function naming +- Add JSDoc comments +EOF +``` + +### Example 3: Full-Stack Feature + +```bash +codeagent-wrapper --parallel <<'EOF' +---TASK--- +id: api_1701234567 +workdir: /project/backend +---CONTENT--- +implement /api/notifications endpoints with WebSocket support + +---TASK--- +id: ui_1701234568 +workdir: /project/frontend +dependencies: api_1701234567 +---CONTENT--- +build Notifications component with real-time updates + +---TASK--- +id: tests_1701234569 +workdir: /project +dependencies: api_1701234567, ui_1701234568 +---CONTENT--- +add E2E tests for notification flow +EOF +``` + +## Further Reading + +- [Codex CLI Documentation](https://codex.docs) +- [Claude CLI Documentation](https://claude.ai/docs) +- [Gemini CLI Documentation](https://ai.google.dev/docs) +- [Architecture Overview](./architecture.md) diff --git 
a/docs/GITHUB-WORKFLOW.md b/docs/GITHUB-WORKFLOW.md new file mode 100644 index 0000000..abc5584 --- /dev/null +++ b/docs/GITHUB-WORKFLOW.md @@ -0,0 +1,468 @@ +# GitHub Workflow Commands + +Issue-to-PR automation with `codeagent-wrapper` integration. + +## Overview + +Two commands streamline GitHub workflows: +- `/gh-create-issue` - Create structured issues through guided dialogue +- `/gh-implement` - Implement issues with automated development lifecycle + +## Prerequisites + +```bash +# Install GitHub CLI +brew install gh + +# Authenticate +gh auth login + +# Verify +gh auth status +``` + +## Commands + +### /gh-create-issue + +Create structured GitHub issues through multi-round dialogue. + +**Usage:** +```bash +/gh-create-issue "add user authentication" +``` + +**Process:** + +1. **Initial Analysis** - Claude analyzes your brief description +2. **Clarification Questions** - Interactive Q&A to gather: + - Technical requirements + - Acceptance criteria + - Dependencies + - Priority/labels +3. **Issue Creation** - Generates structured issue with: + - Clear title + - Detailed description + - Acceptance criteria checklist + - Relevant labels + - Assignee (optional) + +**Example Session:** + +``` +User: /gh-create-issue "add dark mode" + +Claude: I'll help create a structured issue for dark mode. Let me gather some details. + +1. Scope: Just theme toggle, or also persist user preference? +2. Which components need dark mode support? +3. Design system: Using CSS variables or styled-components? + +User: +1. Toggle + persist in localStorage +2. All components in src/components +3. CSS variables + +Claude: Creating issue... + +✅ Issue created: #123 "Implement dark mode with theme persistence" +URL: https://github.com/user/repo/issues/123 +``` + +**Generated Issue Format:** + +```markdown +# Implement dark mode with theme persistence + +## Description +Add dark mode support across all components with user preference persistence. 
+ +## Requirements +- [ ] Add theme toggle component +- [ ] Implement CSS variables for light/dark themes +- [ ] Persist theme preference in localStorage +- [ ] Update all components in src/components to support dark mode +- [ ] Add theme toggle to app header + +## Acceptance Criteria +- [ ] User can toggle between light and dark themes +- [ ] Theme preference persists across sessions +- [ ] All UI components render correctly in both themes +- [ ] No flash of unstyled content on page load + +## Technical Notes +- Use CSS custom properties +- Store preference as `theme: 'light' | 'dark'` in localStorage +- Add `data-theme` attribute to root element + +Labels: enhancement, ui +``` + +--- + +### /gh-implement + +Implement GitHub issue with full development lifecycle. + +**Usage:** +```bash +/gh-implement 123 +``` + +**Phases:** + +#### Phase 1: Issue Analysis +```bash +# Fetches issue details +gh issue view 123 --json title,body,labels,comments + +# Parses: +- Requirements +- Acceptance criteria +- Technical constraints +- Related discussions +``` + +#### Phase 2: Clarification (if needed) +Claude asks questions about: +- Implementation approach +- Architecture decisions +- Testing strategy +- Edge cases + +#### Phase 3: Development + +**Option A: Simple scope** - Direct `codeagent-wrapper` call: +```bash +codeagent-wrapper --backend codex - <<'EOF' +Implement dark mode toggle based on issue #123: +- Add ThemeToggle component +- Implement CSS variables +- Add localStorage persistence +EOF +``` + +**Option B: Complex scope** - Use `/dev` workflow: +```bash +/dev "implement issue #123: dark mode with theme persistence" +``` + +**Coverage requirement:** ≥90% test coverage enforced + +#### Phase 4: Progress Updates +```bash +# After each milestone +gh issue comment 123 --body "✅ Completed: ThemeToggle component" +gh issue comment 123 --body "✅ Completed: CSS variables setup" +gh issue comment 123 --body "✅ Completed: localStorage persistence" +``` + +#### Phase 5: PR 
Creation +```bash +gh pr create \ + --title "[#123] Implement dark mode with theme persistence" \ + --body "Closes #123 + +## Changes +- Added ThemeToggle component +- Implemented light/dark CSS variables +- Added localStorage persistence +- Updated all components for theme support + +## Testing +- Unit tests: ThemeToggle, theme utilities +- Integration tests: theme persistence across page loads +- Coverage: 92%" +``` + +**Output:** +``` +✅ PR created: #124 +URL: https://github.com/user/repo/pull/124 +``` + +--- + +## Examples + +### Example 1: Bug Fix + +```bash +# Create issue +/gh-create-issue "login form doesn't validate email" + +# Implement +/gh-implement 125 +``` + +**Process:** +1. Analysis: Parse bug report, identify validation logic +2. Clarification: Confirm expected validation rules +3. Development: Fix validation, add tests +4. Updates: Comment with fix details +5. PR: Link to issue, show test coverage + +--- + +### Example 2: Feature Development + +```bash +# Create issue +/gh-create-issue "add export to CSV feature" + +# Implement +/gh-implement 126 +``` + +**Process:** +1. Analysis: Understand data structure, export requirements +2. Clarification: Which data fields? File naming? Encoding? +3. Development: + - Backend: CSV generation endpoint + - Frontend: Export button + download handler + - Tests: Unit + integration +4. Updates: Milestone comments (backend done, frontend done, tests done) +5. PR: Full feature description with screenshots + +--- + +### Example 3: Refactoring + +```bash +# Create issue +/gh-create-issue "refactor authentication module" + +# Implement +/gh-implement 127 +``` + +**Process:** +1. Analysis: Review current auth code, identify issues +2. Clarification: Scope (just refactor vs add features)? +3. Development: + - Modularize auth logic + - Extract reusable utilities + - Add missing tests + - Update documentation +4. Updates: Component-by-component progress +5. 
PR: Before/after comparison, test coverage improvement + +--- + +## Workflow Integration + +### With /dev Workflow + +```bash +# Create issue first +/gh-create-issue "implement real-time notifications" + +# Then implement with /dev +/gh-implement 128 + +# Claude will: +# 1. Analyze issue #128 +# 2. Trigger /dev workflow internally +# 3. Execute with 90% coverage requirement +# 4. Post progress updates +# 5. Create PR +``` + +### With Parallel Tasks + +For complex features, `/gh-implement` may use parallel execution: + +```bash +# Internally executes: +codeagent-wrapper --parallel <<'EOF' +---TASK--- +id: backend_notifications +workdir: /project/backend +---CONTENT--- +implement notifications API with WebSocket + +---TASK--- +id: frontend_notifications +workdir: /project/frontend +dependencies: backend_notifications +---CONTENT--- +build Notifications UI component + +---TASK--- +id: tests_notifications +workdir: /project +dependencies: backend_notifications, frontend_notifications +---CONTENT--- +add E2E tests for notification flow +EOF +``` + +--- + +## Configuration + +### Issue Templates + +Create `.github/ISSUE_TEMPLATE/feature.md`: + +```markdown +--- +name: Feature Request +about: Suggest a new feature +labels: enhancement +--- + +## Description + + +## Requirements + + +## Acceptance Criteria + +``` + +### PR Templates + +Create `.github/PULL_REQUEST_TEMPLATE.md`: + +```markdown +## Related Issue +Closes # + +## Changes + + +## Testing + + +## Screenshots (if applicable) + +``` + +--- + +## Best Practices + +1. **Clear issue descriptions** - More context = better implementation +2. **Incremental commits** - Easier to review and rollback +3. **Test-driven** - Write tests before/during implementation +4. **Milestone updates** - Keep issue comments up-to-date +5. 
**Detailed PRs** - Explain why, not just what + +--- + +## Troubleshooting + +**Issue not found:** +```bash +# Verify issue exists +gh issue view 123 + +# Check repository +gh repo view +``` + +**PR creation failed:** +```bash +# Ensure branch is pushed +git push -u origin feature-branch + +# Check if PR already exists +gh pr list --head feature-branch +``` + +**Authentication error:** +```bash +# Re-authenticate +gh auth login + +# Check token scopes +gh auth status +``` + +--- + +## Advanced Usage + +### Custom Labels + +```bash +# Add labels during issue creation +gh issue create \ + --title "Feature: dark mode" \ + --body "..." \ + --label "enhancement,ui,priority:high" +``` + +### Multiple Assignees + +```bash +# Assign to team members +gh issue create \ + --title "..." \ + --assignee @user1,@user2 +``` + +### Milestone Assignment + +```bash +# Add to milestone +gh issue create \ + --title "..." \ + --milestone "v2.0" +``` + +--- + +## Integration with CI/CD + +### Auto-close on merge + +```yaml +# .github/workflows/pr-merge.yml +name: Close Issues on PR Merge +on: + pull_request: + types: [closed] + +jobs: + close-issues: + if: github.event.pull_request.merged == true + runs-on: ubuntu-latest + steps: + - name: Close linked issues + run: gh issue close ${{ github.event.pull_request.number }} +``` + +### Coverage Check + +```yaml +# .github/workflows/coverage.yml +name: Coverage Check +on: [pull_request] + +jobs: + coverage: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - name: Run tests with coverage + run: go test -coverprofile=coverage.out ./... 
+ - name: Check coverage threshold + run: | + coverage=$(go tool cover -func=coverage.out | grep total | awk '{print $3}' | sed 's/%//') + if (( $(echo "$coverage < 90" | bc -l) )); then + echo "Coverage $coverage% is below 90% threshold" + exit 1 + fi +``` + +--- + +## Further Reading + +- [GitHub CLI Manual](https://cli.github.com/manual/) +- [Codeagent-Wrapper Guide](./CODEAGENT-WRAPPER.md) +- [Hooks Documentation](./HOOKS.md) +- [Development Workflow](../README.md) diff --git a/docs/HOOKS.md b/docs/HOOKS.md new file mode 100644 index 0000000..af507f6 --- /dev/null +++ b/docs/HOOKS.md @@ -0,0 +1,197 @@ +# Claude Code Hooks Guide + +Hooks are shell scripts or commands that execute in response to Claude Code events. + +## Available Hook Types + +### 1. UserPromptSubmit +Runs after user submits a prompt, before Claude processes it. + +**Use cases:** +- Auto-activate skills based on keywords +- Add context injection +- Log user requests + +### 2. PostToolUse +Runs after Claude uses a tool. + +**Use cases:** +- Validate tool outputs +- Run additional checks (linting, formatting) +- Log tool usage + +### 3. Stop +Runs when Claude Code session ends. 
+ +**Use cases:** +- Cleanup temporary files +- Generate session reports +- Commit changes automatically + +## Configuration + +Hooks are configured in `.claude/settings.json`: + +```json +{ + "hooks": { + "UserPromptSubmit": [ + { + "hooks": [ + { + "type": "command", + "command": "$CLAUDE_PROJECT_DIR/hooks/skill-activation-prompt.sh" + } + ] + } + ], + "PostToolUse": [ + { + "hooks": [ + { + "type": "command", + "command": "$CLAUDE_PROJECT_DIR/hooks/post-tool-check.sh" + } + ] + } + ] + } +} +``` + +## Creating Custom Hooks + +### Example: Pre-Commit Hook + +**File:** `hooks/pre-commit.sh` + +```bash +#!/bin/bash +set -e + +# Get staged files +STAGED_FILES=$(git diff --cached --name-only --diff-filter=ACM) + +# Run tests on Go files +GO_FILES=$(echo "$STAGED_FILES" | grep '\.go$' || true) +if [ -n "$GO_FILES" ]; then + go test ./... -short || exit 1 +fi + +# Validate JSON files +JSON_FILES=$(echo "$STAGED_FILES" | grep '\.json$' || true) +if [ -n "$JSON_FILES" ]; then + for file in $JSON_FILES; do + jq empty "$file" || exit 1 + done +fi + +echo "✅ Pre-commit checks passed" +``` + +**Register in settings.json:** + +```json +{ + "hooks": { + "PostToolUse": [ + { + "hooks": [ + { + "type": "command", + "command": "$CLAUDE_PROJECT_DIR/hooks/pre-commit.sh" + } + ] + } + ] + } +} +``` + +### Example: Auto-Format Hook + +**File:** `hooks/auto-format.sh` + +```bash +#!/bin/bash + +# Format Go files +find . -name "*.go" -exec gofmt -w {} \; + +# Format JSON files +find . -name "*.json" -exec jq --indent 2 . {} \; -exec mv {} {}.tmp \; -exec mv {}.tmp {} \; + +echo "✅ Files formatted" +``` + +## Environment Variables + +Hooks have access to: +- `$CLAUDE_PROJECT_DIR` - Project root directory +- `$PWD` - Current working directory +- All shell environment variables + +## Best Practices + +1. **Keep hooks fast** - Slow hooks block Claude Code +2. **Handle errors gracefully** - Return non-zero on failure +3. **Use absolute paths** - Reference `$CLAUDE_PROJECT_DIR` +4. 
**Make scripts executable** - `chmod +x hooks/script.sh` +5. **Test independently** - Run hooks manually first +6. **Document behavior** - Add comments explaining logic + +## Debugging Hooks + +Enable verbose logging: + +```bash +# Add to your hook +set -x # Print commands +set -e # Exit on error +``` + +Test manually: + +```bash +cd /path/to/project +./hooks/your-hook.sh +echo $? # Check exit code +``` + +## Built-in Hooks + +This repository includes: + +| Hook | File | Purpose | +|------|------|---------| +| Skill Activation | `skill-activation-prompt.sh` | Auto-suggest skills | +| Pre-commit | `pre-commit.sh` | Code quality checks | + +## Disabling Hooks + +Remove hook configuration from `.claude/settings.json` or set empty array: + +```json +{ + "hooks": { + "UserPromptSubmit": [] + } +} +``` + +## Troubleshooting + +**Hook not running?** +- Check `.claude/settings.json` syntax +- Verify script is executable: `ls -l hooks/` +- Check script path is correct + +**Hook failing silently?** +- Add `set -e` to script +- Check exit codes: `echo $?` +- Add logging: `echo "debug" >> /tmp/hook.log` + +## Further Reading + +- [Claude Code Hooks Documentation](https://docs.anthropic.com/claude-code/hooks) +- [Bash Scripting Guide](https://www.gnu.org/software/bash/manual/) diff --git a/docs/architecture.md b/docs/architecture.md new file mode 100644 index 0000000..8438eae --- /dev/null +++ b/docs/architecture.md @@ -0,0 +1,502 @@ +# System Architecture + +## Overview + +Multi-agent AI development system with Claude Code as orchestrator and pluggable execution backends. + +## High-Level Architecture + +``` +┌─────────────────────────────────────────────────────────────┐ +│ User │ +└──────────────────┬──────────────────────────────────────────┘ + │ + │ /dev, /gh-implement, etc. 
+ ▼ +┌─────────────────────────────────────────────────────────────┐ +│ Claude Code (Orchestrator) │ +│ ┌─────────────────────────────────────────────────────────┐│ +│ │ - Planning & context gathering ││ +│ │ - Requirements clarification ││ +│ │ - Task breakdown ││ +│ │ - Verification & reporting ││ +│ └─────────────────────────────────────────────────────────┘│ +└──────────────────┬──────────────────────────────────────────┘ + │ + │ via codeagent-wrapper + ▼ +┌─────────────────────────────────────────────────────────────┐ +│ Codeagent-Wrapper (Execution Layer) │ +│ ┌──────────────────────────────────────────────────────────┤ +│ │ Backend Interface │ +│ ├──────────────┬──────────────┬──────────────┐ │ +│ │ Codex │ Claude │ Gemini │ │ +│ │ Backend │ Backend │ Backend │ │ +│ └──────────────┴──────────────┴──────────────┘ │ +│ │ +│ ┌──────────────────────────────────────────────────────────┤ +│ │ Features: │ +│ │ - Multi-backend execution │ +│ │ - JSON stream parsing │ +│ │ - Session management │ +│ │ - Parallel task execution │ +│ │ - Timeout handling │ +│ └──────────────────────────────────────────────────────────┘ +└──────────────────┬──────────────────────────────────────────┘ + │ + │ CLI invocations + ▼ +┌─────────────────────────────────────────────────────────────┐ +│ AI CLI Backends │ +│ ┌──────────────┬──────────────┬──────────────┐ │ +│ │ Codex CLI │ Claude CLI │ Gemini CLI │ │ +│ │ │ │ │ │ +│ │ Code editing │ Reasoning │ Fast proto │ │ +│ └──────────────┴──────────────┴──────────────┘ │ +└─────────────────────────────────────────────────────────────┘ +``` + +## Component Architecture + +### 1. 
Orchestrator Layer (Claude Code) + +**Responsibilities:** +- User interaction and requirements gathering +- Context analysis and exploration +- Task planning and breakdown +- Workflow coordination +- Verification and reporting + +**Key Workflows:** +``` +/dev +├── Requirements clarification (AskUserQuestion) +├── Codex analysis (Task tool → Explore agent) +├── Dev plan generation (Task tool → dev-plan-generator) +├── Parallel execution (codeagent-wrapper --parallel) +├── Coverage validation (≥90%) +└── Completion summary + +/gh-implement +├── Issue analysis (gh issue view) +├── Clarification (if needed) +├── Development (codeagent-wrapper or /dev) +├── Progress updates (gh issue comment) +└── PR creation (gh pr create) +``` + +### 2. Execution Layer (Codeagent-Wrapper) + +**Architecture:** + +```go +┌─────────────────────────────────────────────────────────┐ +│ Main Entry Point │ +│ - Parse CLI arguments │ +│ - Detect mode (new/resume/parallel) │ +│ - Select backend │ +└──────────────────┬──────────────────────────────────────┘ + │ + ▼ +┌─────────────────────────────────────────────────────────┐ +│ Backend Selection │ +│ func SelectBackend(name string) Backend │ +│ ┌──────────────┬──────────────┬──────────────┐ │ +│ │ CodexBackend │ ClaudeBackend│ GeminiBackend│ │ +│ └──────────────┴──────────────┴──────────────┘ │ +└──────────────────┬──────────────────────────────────────┘ + │ + ▼ +┌─────────────────────────────────────────────────────────┐ +│ Executor │ +│ func RunCodexTask(cfg *Config) (string, error) │ +│ ┌──────────────────────────────────────────────────────┤ +│ │ 1. Build command args via Backend.BuildArgs() │ +│ │ 2. Start process with timeout │ +│ │ 3. Stream stdout/stderr │ +│ │ 4. Parse JSON stream via ParseJSONStream() │ +│ │ 5. Extract session ID │ +│ │ 6. 
Handle signals (SIGINT, SIGTERM) │ +│ └──────────────────────────────────────────────────────┘ +└──────────────────┬──────────────────────────────────────┘ + │ + ▼ +┌─────────────────────────────────────────────────────────┐ +│ Parser │ +│ func ParseJSONStream(r io.Reader) (string, string) │ +│ ┌──────────────────────────────────────────────────────┤ +│ │ Detects format: │ +│ │ - Codex: {"type":"thread.started","thread_id":...} │ +│ │ - Claude: {"type":"...","subtype":"result"} │ +│ │ - Gemini: {"type":"...","role":"assistant"} │ +│ │ │ +│ │ Extracts: │ +│ │ - Agent messages │ +│ │ - Session IDs │ +│ └──────────────────────────────────────────────────────┘ +└─────────────────────────────────────────────────────────┘ +``` + +**Backend Interface:** + +```go +type Backend interface { + Name() string + Command() string + BuildArgs(cfg *Config, targetArg string) []string +} + +// Codex: codex e --skip-git-repo-check -C --json +// Claude: claude -p --dangerously-skip-permissions --output-format stream-json --verbose +// Gemini: gemini -o stream-json -y -p +``` + +**Key Files:** +- `main.go` - Entry point and orchestration +- `config.go` - CLI argument parsing +- `backend.go` - Backend interface and implementations +- `executor.go` - Process execution and stream handling +- `parser.go` - JSON stream parsing (multi-format) +- `logger.go` - Async logging with ring buffer +- `utils.go` - Helper functions + +### 3. Hooks System + +**Architecture:** + +``` +┌─────────────────────────────────────────────────────────┐ +│ Claude Code Events │ +│ UserPromptSubmit │ PostToolUse │ Stop │ +└──────────────────┬──────────────────────────────────────┘ + │ + │ reads + ▼ +┌─────────────────────────────────────────────────────────┐ +│ .claude/settings.json │ +│ { │ +│ "hooks": { │ +│ "UserPromptSubmit": [ │ +│ { │ +│ "hooks": [ │ +│ { │ +│ "type": "command", │ +│ "command": "$CLAUDE_PROJECT_DIR/hooks/..." 
│ +│ } │ +│ ] │ +│ } │ +│ ] │ +│ } │ +│ } │ +└──────────────────┬──────────────────────────────────────┘ + │ + │ executes + ▼ +┌─────────────────────────────────────────────────────────┐ +│ Hook Scripts │ +│ ┌────────────────────────────────────────────────────┐ │ +│ │ skill-activation-prompt.sh │ │ +│ │ - Reads skills/skill-rules.json │ │ +│ │ - Matches user prompt against triggers │ │ +│ │ - Injects skill suggestions │ │ +│ └────────────────────────────────────────────────────┘ │ +│ ┌────────────────────────────────────────────────────┐ │ +│ │ pre-commit.sh │ │ +│ │ - Validates staged files │ │ +│ │ - Runs tests │ │ +│ │ - Formats code │ │ +│ └────────────────────────────────────────────────────┘ │ +└─────────────────────────────────────────────────────────┘ +``` + +### 4. Skills System + +**Structure:** + +``` +skills/ +├── codex/ +│ └── SKILL.md # Codex CLI integration +├── codeagent/ +│ └── SKILL.md # Multi-backend wrapper +├── gemini/ +│ └── SKILL.md # Gemini CLI integration +└── skill-rules.json # Auto-activation rules +``` + +**skill-rules.json Format:** + +```json +{ + "rules": [ + { + "trigger": { + "pattern": "implement|build|create feature", + "type": "regex" + }, + "skill": "codeagent", + "priority": 1, + "suggestion": "Use codeagent skill for code implementation" + } + ] +} +``` + +## Data Flow + +### Example: /dev Workflow + +``` +1. User: /dev "add user authentication" + │ + ▼ +2. Claude Code: + │ ├─ Clarifies requirements (AskUserQuestion) + │ ├─ Analyzes codebase (Explore agent) + │ └─ Generates dev-plan.md + │ + ▼ +3. Claude Code invokes: codeagent-wrapper --parallel <" --body ""`. +- Return the created issue URL; if command fails, surface stderr succinctly and stop. 
diff --git a/github-workflow/commands/gh-implement.md b/github-workflow/commands/gh-implement.md
new file mode 100644
index 0000000..4a1fc15
--- /dev/null
+++ b/github-workflow/commands/gh-implement.md
@@ -0,0 +1,28 @@
+---
+description: Implement GitHub issue with full development lifecycle
+argument-hint: Issue number (e.g., "123")
+---
+
+You are the `/gh-implement` workflow orchestrator. Drive the issue-to-PR loop with minimal ceremony and zero fluff.
+
+## Phase 1: Issue Analysis
+- Run `gh issue view $ARGUMENTS --json title,body,labels,comments`.
+- Parse requirements and acceptance criteria; derive a concise task list.
+- Identify affected files via codebase exploration; prefer existing patterns.
+
+## Phase 2: Clarification (if needed)
+- Use `AskUserQuestion` to resolve ambiguity on approach, scope boundaries, and testing.
+- Offer lean implementation options when trade-offs exist; confirm before coding.
+
+## Phase 3: Development
+- Invoke `codeagent` skill via codeagent-wrapper with parsed requirements:
+  `codeagent-wrapper --backend codex "<task derived from issue requirements>"`
+- For narrow scope, use direct codeagent-wrapper call; for complex features, use `/dev` workflow.
+- Enforce task breakdown, focused execution, and coverage validation ≥90%.
+
+## Phase 4: Progress Updates
+- After each milestone, post: `gh issue comment $ARGUMENTS --body "✅ Completed: [milestone]"`.
+
+## Phase 5: PR Creation
+- Create PR: `gh pr create --title "[#$ARGUMENTS] ..." --body "Closes #$ARGUMENTS"`.
+- Return the PR URL; surface errors succinctly and stop on failure.
diff --git a/hooks/hooks-config.json b/hooks/hooks-config.json new file mode 100644 index 0000000..85fd49d --- /dev/null +++ b/hooks/hooks-config.json @@ -0,0 +1,12 @@ +{ + "UserPromptSubmit": [ + { + "hooks": [ + { + "type": "command", + "command": "$CLAUDE_PROJECT_DIR/hooks/skill-activation-prompt.sh" + } + ] + } + ] +} diff --git a/hooks/pre-commit.sh b/hooks/pre-commit.sh new file mode 100755 index 0000000..282fa2f --- /dev/null +++ b/hooks/pre-commit.sh @@ -0,0 +1,60 @@ +#!/bin/bash +# Example pre-commit hook +# This hook runs before git commit to validate code quality + +set -e + +# Get staged files +STAGED_FILES=$(git diff --cached --name-only --diff-filter=ACM) + +if [ -z "$STAGED_FILES" ]; then + echo "No files to validate" + exit 0 +fi + +echo "Running pre-commit checks..." + +# Check Go files +GO_FILES=$(echo "$STAGED_FILES" | grep '\.go$' || true) +if [ -n "$GO_FILES" ]; then + echo "Checking Go files..." + + # Format check + gofmt -l $GO_FILES | while read -r file; do + if [ -n "$file" ]; then + echo "❌ $file needs formatting (run: gofmt -w $file)" + exit 1 + fi + done + + # Run tests + if command -v go &> /dev/null; then + echo "Running go tests..." + go test ./... -short || { + echo "❌ Tests failed" + exit 1 + } + fi +fi + +# Check JSON files +JSON_FILES=$(echo "$STAGED_FILES" | grep '\.json$' || true) +if [ -n "$JSON_FILES" ]; then + echo "Validating JSON files..." + for file in $JSON_FILES; do + if ! jq empty "$file" 2>/dev/null; then + echo "❌ Invalid JSON: $file" + exit 1 + fi + done +fi + +# Check Markdown files +MD_FILES=$(echo "$STAGED_FILES" | grep '\.md$' || true) +if [ -n "$MD_FILES" ]; then + echo "Checking markdown files..." 
+ # Add markdown linting if needed +fi + +echo "✅ All pre-commit checks passed" +exit 0 diff --git a/hooks/skill-activation-prompt.js b/hooks/skill-activation-prompt.js new file mode 100644 index 0000000..76d163f --- /dev/null +++ b/hooks/skill-activation-prompt.js @@ -0,0 +1,85 @@ +#!/usr/bin/env node + +const fs = require("fs"); +const path = require("path"); + +function readInput() { + const raw = fs.readFileSync(0, "utf8").trim(); + if (!raw) return {}; + try { + return JSON.parse(raw); + } catch (_err) { + return {}; + } +} + +function extractPrompt(payload) { + return ( + payload.prompt || + payload.text || + payload.userPrompt || + (payload.data && payload.data.prompt) || + "" + ).toString(); +} + +function loadRules() { + const rulesPath = path.resolve(__dirname, "../skills/skill-rules.json"); + try { + const file = fs.readFileSync(rulesPath, "utf8"); + return JSON.parse(file); + } catch (_err) { + return { skills: {} }; + } +} + +function matchSkill(prompt, rule, skillName) { + const triggers = (rule && rule.promptTriggers) || {}; + const keywords = [...(triggers.keywords || []), skillName].filter(Boolean); + const patterns = triggers.intentPatterns || []; + const promptLower = prompt.toLowerCase(); + + const keyword = keywords.find((k) => promptLower.includes(k.toLowerCase())); + if (keyword) { + return `命中关键词 "${keyword}"`; + } + + for (const pattern of patterns) { + try { + if (new RegExp(pattern, "i").test(prompt)) { + return `命中模式 /${pattern}/`; + } + } catch (_err) { + continue; + } + } + return null; +} + +function main() { + const payload = readInput(); + const prompt = extractPrompt(payload); + if (!prompt.trim()) { + console.log(JSON.stringify({ suggestedSkills: [] }, null, 2)); + return; + } + + const rules = loadRules(); + const suggestions = []; + + for (const [name, rule] of Object.entries(rules.skills || {})) { + const matchReason = matchSkill(prompt, rule, name); + if (matchReason) { + suggestions.push({ + skill: name, + enforcement: 
rule.enforcement || "suggest", + priority: rule.priority || "normal", + reason: matchReason + }); + } + } + + console.log(JSON.stringify({ suggestedSkills: suggestions }, null, 2)); +} + +main(); diff --git a/hooks/skill-activation-prompt.sh b/hooks/skill-activation-prompt.sh new file mode 100755 index 0000000..2b50976 --- /dev/null +++ b/hooks/skill-activation-prompt.sh @@ -0,0 +1,12 @@ +#!/usr/bin/env bash + +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +SCRIPT="$SCRIPT_DIR/skill-activation-prompt.js" + +if command -v node >/dev/null 2>&1; then + node "$SCRIPT" "$@" || true +else + echo '{"suggestedSkills":[],"meta":{"warning":"node not found"}}' +fi + +exit 0 diff --git a/hooks/test-skill-activation.sh b/hooks/test-skill-activation.sh new file mode 100755 index 0000000..72d86d2 --- /dev/null +++ b/hooks/test-skill-activation.sh @@ -0,0 +1,77 @@ +#!/usr/bin/env bash + +# Simple test runner for skill-activation-prompt hook. +# Each case feeds JSON to the hook and validates suggested skills. 
+ +set -uo pipefail + +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +HOOK_SCRIPT="$SCRIPT_DIR/skill-activation-prompt.sh" + +parse_skills() { + node -e 'const data = JSON.parse(require("fs").readFileSync(0, "utf8")); const skills = (data.suggestedSkills || []).map(s => s.skill); console.log(skills.join(" "));' +} + +run_case() { + local name="$1" + local input="$2" + shift 2 + local expected=("$@") + + local output skills + output="$("$HOOK_SCRIPT" <<<"$input")" + skills="$(printf "%s" "$output" | parse_skills)" + + local pass=0 + if [[ ${#expected[@]} -eq 1 && ${expected[0]} == "none" ]]; then + [[ -z "$skills" ]] && pass=1 + else + pass=1 + for need in "${expected[@]}"; do + if [[ " $skills " != *" $need "* ]]; then + pass=0 + break + fi + done + fi + + if [[ $pass -eq 1 ]]; then + echo "PASS: $name" + else + echo "FAIL: $name" + echo " input: $input" + echo " expected skills: ${expected[*]}" + echo " actual skills: ${skills:-}" + return 1 + fi +} + +main() { + local status=0 + + run_case "keyword 'issue' => gh-workflow" \ + '{"prompt":"Please open an issue for this bug"}' \ + "gh-workflow" || status=1 + + run_case "keyword 'codex' => codex" \ + '{"prompt":"codex please handle this change"}' \ + "codex" || status=1 + + run_case "no matching keywords => none" \ + '{"prompt":"Just saying hello"}' \ + "none" || status=1 + + run_case "multiple keywords => codex & gh-workflow" \ + '{"prompt":"codex refactor then open an issue"}' \ + "codex" "gh-workflow" || status=1 + + if [[ $status -eq 0 ]]; then + echo "All tests passed." + else + echo "Some tests failed." 
+ fi + + exit "$status" +} + +main "$@" diff --git a/install.py b/install.py index 281b06c..ffae828 100644 --- a/install.py +++ b/install.py @@ -183,6 +183,8 @@ def execute_module(name: str, cfg: Dict[str, Any], ctx: Dict[str, Any]) -> Dict[ op_copy_file(op, ctx) elif op_type == "merge_dir": op_merge_dir(op, ctx) + elif op_type == "merge_json": + op_merge_json(op, ctx) elif op_type == "run_command": op_run_command(op, ctx) else: @@ -279,6 +281,51 @@ def op_copy_file(op: Dict[str, Any], ctx: Dict[str, Any]) -> None: write_log({"level": "INFO", "message": f"Copied file {src} -> {dst}"}, ctx) +def op_merge_json(op: Dict[str, Any], ctx: Dict[str, Any]) -> None: + """Merge JSON from source into target, supporting nested key paths.""" + src = _source_path(op, ctx) + dst = _target_path(op, ctx) + merge_key = op.get("merge_key") + + if not src.exists(): + raise FileNotFoundError(f"Source JSON not found: {src}") + + src_data = _load_json(src) + + dst.parent.mkdir(parents=True, exist_ok=True) + if dst.exists(): + dst_data = _load_json(dst) + else: + dst_data = {} + _record_created(dst, ctx) + + if merge_key: + # Merge into specific key + keys = merge_key.split(".") + target = dst_data + for key in keys[:-1]: + target = target.setdefault(key, {}) + + last_key = keys[-1] + if isinstance(src_data, dict) and isinstance(target.get(last_key), dict): + # Deep merge for dicts + target[last_key] = {**target.get(last_key, {}), **src_data} + else: + target[last_key] = src_data + else: + # Merge at root level + if isinstance(src_data, dict) and isinstance(dst_data, dict): + dst_data = {**dst_data, **src_data} + else: + dst_data = src_data + + with dst.open("w", encoding="utf-8") as fh: + json.dump(dst_data, fh, indent=2, ensure_ascii=False) + fh.write("\n") + + write_log({"level": "INFO", "message": f"Merged JSON {src} -> {dst} (key: {merge_key or 'root'})"}, ctx) + + def op_run_command(op: Dict[str, Any], ctx: Dict[str, Any]) -> None: env = os.environ.copy() for key, value in 
op.get("env", {}).items(): diff --git a/install.sh b/install.sh index 20bb838..a90f87a 100644 --- a/install.sh +++ b/install.sh @@ -22,22 +22,22 @@ esac # Build download URL REPO="cexll/myclaude" VERSION="latest" -BINARY_NAME="codex-wrapper-${OS}-${ARCH}" +BINARY_NAME="codeagent-wrapper-${OS}-${ARCH}" URL="https://github.com/${REPO}/releases/${VERSION}/download/${BINARY_NAME}" -echo "Downloading codex-wrapper from ${URL}..." -if ! curl -fsSL "$URL" -o /tmp/codex-wrapper; then +echo "Downloading codeagent-wrapper from ${URL}..." +if ! curl -fsSL "$URL" -o /tmp/codeagent-wrapper; then echo "ERROR: failed to download binary" >&2 exit 1 fi mkdir -p "$HOME/bin" -mv /tmp/codex-wrapper "$HOME/bin/codex-wrapper" -chmod +x "$HOME/bin/codex-wrapper" +mv /tmp/codeagent-wrapper "$HOME/bin/codeagent-wrapper" +chmod +x "$HOME/bin/codeagent-wrapper" -if "$HOME/bin/codex-wrapper" --version >/dev/null 2>&1; then - echo "codex-wrapper installed successfully to ~/bin/codex-wrapper" +if "$HOME/bin/codeagent-wrapper" --version >/dev/null 2>&1; then + echo "codeagent-wrapper installed successfully to ~/bin/codeagent-wrapper" else echo "ERROR: installation verification failed" >&2 exit 1 diff --git a/skills/codeagent/SKILL.md b/skills/codeagent/SKILL.md new file mode 100644 index 0000000..6744c10 --- /dev/null +++ b/skills/codeagent/SKILL.md @@ -0,0 +1,101 @@ +--- +name: codeagent +description: Execute codeagent-wrapper for multi-backend AI code tasks. Supports Codex, Claude, and Gemini backends with file references (@syntax) and structured output. +--- + +# Codeagent Wrapper Integration + +## Overview + +Execute codeagent-wrapper commands with pluggable AI backends (Codex, Claude, Gemini). Supports file references via `@` syntax and parallel task execution. 
+
+## When to Use
+
+- Complex code analysis requiring deep understanding
+- Large-scale refactoring across multiple files
+- Automated code generation with backend selection
+
+## Usage
+
+**HEREDOC syntax** (recommended):
+```bash
+codeagent-wrapper - [working_dir] <<'EOF'
+<task description>
+EOF
+```
+
+**With backend selection**:
+```bash
+codeagent-wrapper --backend claude - <<'EOF'
+<task description>
+EOF
+```
+
+**Simple tasks**:
+```bash
+codeagent-wrapper "simple task" [working_dir]
+codeagent-wrapper --backend gemini "simple task"
+```
+
+## Backends
+
+| Backend | Command | Description |
+|---------|---------|-------------|
+| codex | `--backend codex` | OpenAI Codex (default) |
+| claude | `--backend claude` | Anthropic Claude |
+| gemini | `--backend gemini` | Google Gemini |
+
+## Parameters
+
+- `task` (required): Task description, supports `@file` references
+- `working_dir` (optional): Working directory (default: current)
+- `--backend` (optional): Select AI backend (codex/claude/gemini)
+
+## Return Format
+
+```
+Agent response text here...
+
+---
+SESSION_ID: 019a7247-ac9d-71f3-89e2-a823dbd8fd14
+```
+
+## Resume Session
+
+```bash
+codeagent-wrapper resume <session_id> - <<'EOF'
+<follow-up task>
+EOF
+```
+
+## Parallel Execution
+
+```bash
+codeagent-wrapper --parallel <<'EOF'
+---TASK---
+id: task1
+workdir: /path/to/dir
+---CONTENT---
+task content
+---TASK---
+id: task2
+dependencies: task1
+---CONTENT---
+dependent task
+EOF
+```
+
+## Environment Variables
+
+- `CODEX_TIMEOUT`: Override timeout in milliseconds (default: 7200000)
+
+## Invocation Pattern
+
+```
+Bash tool parameters:
+- command: codeagent-wrapper --backend <backend> - [working_dir] <<'EOF'
+  <task content>
+  EOF
+- timeout: 7200000
+- description: <brief task description>
+```
diff --git a/skills/codex/SKILL.md b/skills/codex/SKILL.md
index 21da4c7..434aae4 100644
--- a/skills/codex/SKILL.md
+++ b/skills/codex/SKILL.md
@@ -32,7 +32,7 @@ When falling back to direct execution:
 **Mandatory**: Run every automated invocation through the Bash tool in the foreground with **HEREDOC syntax** to avoid shell quoting issues, keeping the `timeout` parameter fixed at `7200000` milliseconds (do not change it or use any other entry point).
```bash -codex-wrapper - [working_dir] <<'EOF' +codeagent-wrapper - [working_dir] <<'EOF' EOF ``` @@ -44,12 +44,12 @@ EOF **Simple tasks** (backward compatibility): For simple single-line tasks without special characters, you can still use direct quoting: ```bash -codex-wrapper "simple task here" [working_dir] +codeagent-wrapper "simple task here" [working_dir] ``` **Resume a session with HEREDOC:** ```bash -codex-wrapper resume - [working_dir] <<'EOF' +codeagent-wrapper resume - [working_dir] <<'EOF' EOF ``` @@ -58,7 +58,7 @@ EOF - **Bash/Zsh**: Use `<<'EOF'` (single quotes prevent variable expansion) - **PowerShell 5.1+**: Use `@'` and `'@` (here-string syntax) ```powershell - codex-wrapper - @' + codeagent-wrapper - @' task content '@ ``` @@ -104,7 +104,7 @@ All automated executions must use HEREDOC syntax through the Bash tool in the fo ``` Bash tool parameters: -- command: codex-wrapper - [working_dir] <<'EOF' +- command: codeagent-wrapper - [working_dir] <<'EOF' EOF - timeout: 7200000 @@ -120,18 +120,18 @@ Run every call in the foreground—never append `&` to background it—so logs a **Basic code analysis:** ```bash # Recommended: with HEREDOC (handles any special characters) -codex-wrapper - <<'EOF' +codeagent-wrapper - <<'EOF' explain @src/main.ts EOF # timeout: 7200000 # Alternative: simple direct quoting (if task is simple) -codex-wrapper "explain @src/main.ts" +codeagent-wrapper "explain @src/main.ts" ``` **Refactoring with multiline instructions:** ```bash -codex-wrapper - <<'EOF' +codeagent-wrapper - <<'EOF' refactor @src/utils for performance: - Extract duplicate code into helpers - Use memoization for expensive calculations @@ -142,7 +142,7 @@ EOF **Multi-file analysis:** ```bash -codex-wrapper - "/path/to/project" <<'EOF' +codeagent-wrapper - "/path/to/project" <<'EOF' analyze @. and find security issues: 1. Check for SQL injection vulnerabilities 2. 
Identify XSS risks in templates @@ -155,13 +155,13 @@ EOF **Resume previous session:** ```bash # First session -codex-wrapper - <<'EOF' +codeagent-wrapper - <<'EOF' add comments to @utils.js explaining the caching logic EOF # Output includes: SESSION_ID: 019a7247-ac9d-71f3-89e2-a823dbd8fd14 # Continue the conversation with more context -codex-wrapper resume 019a7247-ac9d-71f3-89e2-a823dbd8fd14 - <<'EOF' +codeagent-wrapper resume 019a7247-ac9d-71f3-89e2-a823dbd8fd14 - <<'EOF' now add TypeScript type hints and handle edge cases where cache is null EOF # timeout: 7200000 @@ -169,7 +169,7 @@ EOF **Task with code snippets and special characters:** ```bash -codex-wrapper - <<'EOF' +codeagent-wrapper - <<'EOF' Fix the bug in @app.js where the regex /\d+/ doesn't match "123" The current code is: const re = /\d+/; @@ -190,10 +190,10 @@ EOF **Correct:** ```bash # Option 1: file redirection -codex-wrapper --parallel < tasks.txt +codeagent-wrapper --parallel < tasks.txt # Option 2: heredoc (recommended for multiple tasks) -codex-wrapper --parallel <<'EOF' +codeagent-wrapper --parallel <<'EOF' ---TASK--- id: task1 workdir: /path/to/dir @@ -202,28 +202,28 @@ task content EOF # Option 3: pipe -echo "---TASK---..." | codex-wrapper --parallel +echo "---TASK---..." | codeagent-wrapper --parallel ``` **Incorrect (will trigger shell parsing errors):** ```bash # Bad: no extra args allowed after --parallel -codex-wrapper --parallel - /path/to/dir <<'EOF' +codeagent-wrapper --parallel - /path/to/dir <<'EOF' ... 
EOF # Bad: --parallel does not take a task argument -codex-wrapper --parallel "task description" +codeagent-wrapper --parallel "task description" # Bad: workdir must live inside the task config -codex-wrapper --parallel /path/to/dir < tasks.txt +codeagent-wrapper --parallel /path/to/dir < tasks.txt ``` For multiple independent or dependent tasks, use `--parallel` mode with delimiter format: **Typical Workflow (analyze → implement → test, chained in a single parallel call)**: ```bash -codex-wrapper --parallel <<'EOF' +codeagent-wrapper --parallel <<'EOF' ---TASK--- id: analyze_1732876800 workdir: /home/user/project @@ -243,10 +243,10 @@ dependencies: implement_1732876801 add and run regression tests covering the new endpoints and UI flows EOF ``` -A single `codex-wrapper --parallel` call schedules all three stages concurrently, using `dependencies` to enforce sequential ordering without multiple invocations. +A single `codeagent-wrapper --parallel` call schedules all three stages concurrently, using `dependencies` to enforce sequential ordering without multiple invocations. ```bash -codex-wrapper --parallel <<'EOF' +codeagent-wrapper --parallel <<'EOF' ---TASK--- id: backend_1732876800 workdir: /home/user/project/backend @@ -283,14 +283,14 @@ EOF **Dependencies Best Practices** -- Avoid multiple invocations: Place "analyze then implement" in a single `codex-wrapper --parallel` call, chaining them via `dependencies`, rather than running analysis first and then launching implementation separately. +- Avoid multiple invocations: Place "analyze then implement" in a single `codeagent-wrapper --parallel` call, chaining them via `dependencies`, rather than running analysis first and then launching implementation separately. - Naming convention: Use `_` format (e.g., `analyze_1732876800`, `implement_1732876801`), where action names map to features/stages and timestamps ensure uniqueness and sortability. 
- Dependency chain design: Keep chains short; only add dependencies for tasks that truly require ordering, let others run in parallel, avoiding over-serialization that reduces throughput. **Resume Failed Tasks**: ```bash # Use session_id from previous output to resume -codex-wrapper --parallel <<'EOF' +codeagent-wrapper --parallel <<'EOF' ---TASK--- id: T2 session_id: 019xxx-previous-session-id diff --git a/skills/skill-rules.json b/skills/skill-rules.json new file mode 100644 index 0000000..26d2e64 --- /dev/null +++ b/skills/skill-rules.json @@ -0,0 +1,44 @@ +{ + "skills": { + "codex": { + "type": "execution", + "enforcement": "suggest", + "priority": "high", + "promptTriggers": { + "keywords": [ + "refactor", + "implement", + "code change", + "bug fix", + "生成代码", + "重构", + "修复" + ], + "intentPatterns": [ + "(refactor|rewrite|optimi[sz]e)\\b", + "(implement|build|write).*(feature|function|module|code)", + "(fix|debug).*(bug|error|issue)" + ] + } + }, + "gh-workflow": { + "type": "domain", + "enforcement": "suggest", + "priority": "high", + "promptTriggers": { + "keywords": [ + "issue", + "pr", + "pull request", + "github", + "gh workflow", + "merge" + ], + "intentPatterns": [ + "(create|open|update|close|review).*(pr|pull request|issue)", + "\\bgithub\\b|\\bgh\\b" + ] + } + } + } +}