Compare commits

...

5 Commits

Author SHA1 Message Date
cexll
19d411a6a2 fix installer bootstrap for do/omo/dev initialization 2026-02-22 18:19:29 +08:00
cexll
791bd03724 fix installer offline fallback and remove codex/gemini skills 2026-02-22 16:45:26 +08:00
cexll
8252b67567 fix(executor): filter codex_core stderr noise 2026-02-18 16:32:59 +08:00
ben
207d3c5436 Merge pull request #147 from AsakiEmura/fix/unset-claudecode-env
fix(executor): unset CLAUDECODE env to prevent nested session rejection
2026-02-18 16:32:18 +08:00
lza
5fe8c24f55 fix(executor): unset CLAUDECODE env to prevent nested session rejection
Claude Code v2.1.41+ sets CLAUDECODE=1 in all child Bash processes and
rejects startup when the variable is present. When codeagent-wrapper
spawns `claude -p` as a subprocess, it inherits this variable and gets
blocked with "cannot be launched inside another Claude Code session".

Add UnsetEnv method to commandRunner interface and strip CLAUDECODE
before spawning the claude backend.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-18 07:56:25 +08:00
11 changed files with 249 additions and 614 deletions

View File

@@ -15,6 +15,8 @@ const API_HEADERS = {
"User-Agent": "myclaude-npx",
Accept: "application/vnd.github+json",
};
const WRAPPER_REQUIRED_MODULES = new Set(["do", "omo"]);
const WRAPPER_REQUIRED_SKILLS = new Set(["dev"]);
function parseArgs(argv) {
const out = {
@@ -499,9 +501,19 @@ async function updateInstalledModules(installDir, tag, config, dryRun) {
}
await fs.promises.mkdir(installDir, { recursive: true });
const installState = { wrapperInstalled: false };
async function ensureWrapperInstalled() {
if (installState.wrapperInstalled) return;
process.stdout.write("Installing codeagent-wrapper...\n");
await runInstallSh(repoRoot, installDir, tag);
installState.wrapperInstalled = true;
}
for (const name of toUpdate) {
if (WRAPPER_REQUIRED_MODULES.has(name)) await ensureWrapperInstalled();
process.stdout.write(`Updating module: ${name}\n`);
const r = await applyModule(name, config, repoRoot, installDir, true, tag);
const r = await applyModule(name, config, repoRoot, installDir, true, tag, installState);
upsertModuleStatus(installDir, r);
}
} finally {
@@ -777,7 +789,57 @@ async function rmTree(p) {
await fs.promises.rmdir(p, { recursive: true });
}
async function applyModule(moduleName, config, repoRoot, installDir, force, tag) {
function defaultModelsConfig() {
  // Baseline ~/.codeagent/models.json contents, used when neither an existing
  // config file nor a packaged template is available.
  const config = {
    default_backend: "codex",
    default_model: "gpt-4.1",
    backends: {},
    agents: {},
  };
  return config;
}
function mergeModuleAgentsToModels(moduleName, mod, repoRoot) {
  // Merge a module's agent definitions into ~/.codeagent/models.json.
  // Entries are only overwritten when they are absent or were previously
  // written by a module (tagged with "__module__"); user-authored entries
  // are left untouched. Returns true when models.json was modified.
  const agents = mod && mod.agents;
  if (!isPlainObject(agents) || Object.keys(agents).length === 0) return false;

  const modelsPath = path.join(os.homedir(), ".codeagent", "models.json");
  fs.mkdirSync(path.dirname(modelsPath), { recursive: true });

  let models;
  if (fs.existsSync(modelsPath)) {
    models = JSON.parse(fs.readFileSync(modelsPath, "utf8"));
  } else {
    // No config yet: seed from the packaged template when present, but drop
    // its example agents so only module-managed entries get written.
    const templatePath = path.join(repoRoot, "templates", "models.json.example");
    if (fs.existsSync(templatePath)) {
      models = JSON.parse(fs.readFileSync(templatePath, "utf8"));
      if (!isPlainObject(models)) models = defaultModelsConfig();
      models.agents = {};
    } else {
      models = defaultModelsConfig();
    }
  }
  if (!isPlainObject(models)) models = defaultModelsConfig();
  if (!isPlainObject(models.agents)) models.agents = {};

  let dirty = false;
  for (const agentName of Object.keys(agents)) {
    const agentCfg = agents[agentName];
    if (!isPlainObject(agentCfg)) continue;
    const current = models.agents[agentName];
    const moduleOwned =
      !isPlainObject(current) ||
      Object.prototype.hasOwnProperty.call(current, "__module__");
    if (!moduleOwned) continue;
    const merged = Object.assign({}, agentCfg, { __module__: moduleName });
    if (!deepEqual(current, merged)) {
      models.agents[agentName] = merged;
      dirty = true;
    }
  }
  if (dirty) {
    fs.writeFileSync(modelsPath, JSON.stringify(models, null, 2) + "\n", "utf8");
  }
  return dirty;
}
async function applyModule(moduleName, config, repoRoot, installDir, force, tag, installState) {
const mod = config && config.modules && config.modules[moduleName];
if (!mod) throw new Error(`Unknown module: ${moduleName}`);
const ops = Array.isArray(mod.operations) ? mod.operations : [];
@@ -803,7 +865,12 @@ async function applyModule(moduleName, config, repoRoot, installDir, force, tag)
if (cmd !== "bash install.sh") {
throw new Error(`Refusing run_command: ${cmd || "(empty)"}`);
}
if (installState && installState.wrapperInstalled) {
result.operations.push({ type, status: "success", skipped: true });
continue;
}
await runInstallSh(repoRoot, installDir, tag);
if (installState) installState.wrapperInstalled = true;
} else {
throw new Error(`Unsupported operation type: ${type}`);
}
@@ -834,6 +901,19 @@ async function applyModule(moduleName, config, repoRoot, installDir, force, tag)
});
}
try {
if (mergeModuleAgentsToModels(moduleName, mod, repoRoot)) {
result.has_agents = true;
result.operations.push({ type: "merge_agents", status: "success" });
}
} catch (err) {
result.operations.push({
type: "merge_agents",
status: "failed",
error: err && err.message ? err.message : String(err),
});
}
return result;
}
@@ -1006,34 +1086,54 @@ async function installSelected(picks, tag, config, installDir, force, dryRun) {
try {
let repoRoot = repoRootFromHere();
if (needRepo || needWrapper) {
if (!tag) throw new Error("No tag available to download");
const archive = path.join(tmp, "src.tgz");
const url = `https://codeload.github.com/${REPO.owner}/${REPO.name}/tar.gz/refs/tags/${encodeURIComponent(
tag
)}`;
process.stdout.write(`Downloading ${REPO.owner}/${REPO.name}@${tag}...\n`);
await downloadToFile(url, archive);
process.stdout.write("Extracting...\n");
const extracted = path.join(tmp, "src");
await extractTarGz(archive, extracted);
repoRoot = extracted;
if (tag) {
const archive = path.join(tmp, "src.tgz");
const url = `https://codeload.github.com/${REPO.owner}/${REPO.name}/tar.gz/refs/tags/${encodeURIComponent(
tag
)}`;
process.stdout.write(`Downloading ${REPO.owner}/${REPO.name}@${tag}...\n`);
await downloadToFile(url, archive);
process.stdout.write("Extracting...\n");
const extracted = path.join(tmp, "src");
await extractTarGz(archive, extracted);
repoRoot = extracted;
} else {
process.stdout.write("Offline mode: installing from local package contents.\n");
}
}
await fs.promises.mkdir(installDir, { recursive: true });
const installState = { wrapperInstalled: false };
async function ensureWrapperInstalled() {
if (installState.wrapperInstalled) return;
process.stdout.write("Installing codeagent-wrapper...\n");
await runInstallSh(repoRoot, installDir, tag);
installState.wrapperInstalled = true;
}
for (const p of picks) {
if (p.kind === "wrapper") {
process.stdout.write("Installing codeagent-wrapper...\n");
await runInstallSh(repoRoot, installDir, tag);
await ensureWrapperInstalled();
continue;
}
if (p.kind === "module") {
if (WRAPPER_REQUIRED_MODULES.has(p.moduleName)) await ensureWrapperInstalled();
process.stdout.write(`Installing module: ${p.moduleName}\n`);
const r = await applyModule(p.moduleName, config, repoRoot, installDir, force, tag);
const r = await applyModule(
p.moduleName,
config,
repoRoot,
installDir,
force,
tag,
installState
);
upsertModuleStatus(installDir, r);
continue;
}
if (p.kind === "skill") {
if (WRAPPER_REQUIRED_SKILLS.has(p.skillName)) await ensureWrapperInstalled();
process.stdout.write(`Installing skill: ${p.skillName}\n`);
await copyDirRecursive(
path.join(repoRoot, "skills", p.skillName),

View File

@@ -169,6 +169,12 @@ func (f *execFakeRunner) Process() executor.ProcessHandle {
return &execFakeProcess{pid: 1}
}
// UnsetEnv removes the given keys from the fake runner's recorded env map,
// mirroring realCmd.UnsetEnv for executor tests.
func (f *execFakeRunner) UnsetEnv(keys ...string) {
	for i := range keys {
		delete(f.env, keys[i])
	}
}
func TestExecutorRunCodexTaskWithContext(t *testing.T) {
defer resetTestHooks()

View File

@@ -274,6 +274,10 @@ func (d *drainBlockingCmd) Process() executor.ProcessHandle {
return d.inner.Process()
}
// UnsetEnv forwards env-var removal to the wrapped command so the
// drain-blocking decorator still satisfies the commandRunner interface.
func (d *drainBlockingCmd) UnsetEnv(keys ...string) {
	d.inner.UnsetEnv(keys...)
}
type bufferWriteCloser struct {
buf bytes.Buffer
mu sync.Mutex
@@ -568,6 +572,14 @@ func (f *fakeCmd) Process() executor.ProcessHandle {
return f.process
}
// UnsetEnv deletes the given keys from the fake command's env map.
// The mutex is held because tests may inspect f.env concurrently.
func (f *fakeCmd) UnsetEnv(keys ...string) {
	f.mu.Lock()
	defer f.mu.Unlock()
	for i := range keys {
		delete(f.env, keys[i])
	}
}
func (f *fakeCmd) runStdoutScript() {
if len(f.stdoutPlan) == 0 {
if !f.keepStdoutOpen {

View File

@@ -41,6 +41,11 @@ func (f *fakeCmd) SetEnv(env map[string]string) {
}
}
func (f *fakeCmd) Process() processHandle { return nil }
// UnsetEnv removes the given keys from the fake env map, satisfying the
// commandRunner interface for these tests.
func (f *fakeCmd) UnsetEnv(keys ...string) {
	for i := range keys {
		delete(f.env, keys[i])
	}
}
func TestEnvInjection_LogsToStderrAndMasksKey(t *testing.T) {
// Arrange ~/.codeagent/models.json via HOME override.

View File

@@ -113,6 +113,7 @@ type commandRunner interface {
SetStderr(io.Writer)
SetDir(string)
SetEnv(env map[string]string)
UnsetEnv(keys ...string)
Process() processHandle
}
@@ -221,6 +222,33 @@ func (r *realCmd) SetEnv(env map[string]string) {
r.cmd.Env = out
}
// UnsetEnv removes the named variables from the child process environment.
// Entries are compared by the name before the first '='; entries without an
// '=' are treated as bare names. Safe to call on a nil receiver.
func (r *realCmd) UnsetEnv(keys ...string) {
	if r == nil || r.cmd == nil || len(keys) == 0 {
		return
	}
	// A nil cmd.Env means "inherit the whole parent environment", so it must
	// be materialized before individual entries can be filtered out.
	if r.cmd.Env == nil {
		r.cmd.Env = os.Environ()
	}
	kept := make([]string, 0, len(r.cmd.Env))
entries:
	for _, entry := range r.cmd.Env {
		name := entry
		if i := strings.IndexByte(entry, '='); i >= 0 {
			name = entry[:i]
		}
		for _, k := range keys {
			if name == k {
				continue entries
			}
		}
		kept = append(kept, entry)
	}
	r.cmd.Env = kept
}
func (r *realCmd) Process() processHandle {
if r == nil || r.cmd == nil || r.cmd.Process == nil {
return nil
@@ -1126,6 +1154,13 @@ func RunCodexTaskWithContext(parentCtx context.Context, taskSpec TaskSpec, backe
injectTempEnv(cmd)
// Claude Code sets CLAUDECODE=1 in its child processes. If we don't
// remove it, the spawned `claude -p` detects the variable and refuses
// to start ("cannot be launched inside another Claude Code session").
if commandName == "claude" {
cmd.UnsetEnv("CLAUDECODE")
}
// For backends that don't support -C flag (claude, gemini), set working directory via cmd.Dir
// Codex passes workdir via -C flag, so we skip setting Dir for it to avoid conflicts
if cfg.Mode != "resume" && commandName != "codex" && cfg.WorkDir != "" {

View File

@@ -20,8 +20,7 @@ var geminiNoisePatterns = []string{
// codexNoisePatterns contains stderr patterns to filter for the codex backend.
// The single "ERROR codex_core::" prefix matches every codex_core error line
// (needs_follow_up, skills::loader, rollout, ...), so the more specific
// sub-patterns it subsumed have been removed as redundant.
var codexNoisePatterns = []string{
	"ERROR codex_core::",
}
// filteringWriter wraps an io.Writer and filters out lines matching patterns

View File

@@ -71,3 +71,35 @@ func TestFilteringWriterPartialLines(t *testing.T) {
t.Errorf("got %q, want %q", got, "Hello World\n")
}
}
// TestFilteringWriterCodexNoise verifies that the codex stderr filter drops
// every "ERROR codex_core::" line (regardless of the sub-module suffix) while
// passing through regular output and errors from other modules untouched.
func TestFilteringWriterCodexNoise(t *testing.T) {
	tests := []struct {
		name  string
		input string
		want  string
	}{
		{
			name:  "filter all codex_core errors",
			input: "ERROR codex_core::rollout::list: state db missing rollout path for thread 123\nERROR codex_core::skills::loader: missing skill\nVisible output\n",
			want:  "Visible output\n",
		},
		{
			name:  "keep non codex_core errors",
			input: "ERROR another_module::state: real failure\nERROR codex_core::codex: needs_follow_up: true\nDone\n",
			want:  "ERROR another_module::state: real failure\nDone\n",
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			var buf bytes.Buffer
			fw := newFilteringWriter(&buf, codexNoisePatterns)
			_, _ = fw.Write([]byte(tt.input))
			// Flush so any buffered partial line is emitted before comparing.
			fw.Flush()
			if got := buf.String(); got != tt.want {
				t.Errorf("got %q, want %q", got, tt.want)
			}
		})
	}
}

View File

@@ -24,6 +24,7 @@ except ImportError: # pragma: no cover
DEFAULT_INSTALL_DIR = "~/.claude"
SETTINGS_FILE = "settings.json"
WRAPPER_REQUIRED_MODULES = {"do", "omo"}
def _ensure_list(ctx: Dict[str, Any], key: str) -> List[Any]:
@@ -898,6 +899,24 @@ def execute_module(name: str, cfg: Dict[str, Any], ctx: Dict[str, Any]) -> Dict[
"installed_at": datetime.now().isoformat(),
}
if name in WRAPPER_REQUIRED_MODULES:
try:
ensure_wrapper_installed(ctx)
result["operations"].append({"type": "ensure_wrapper", "status": "success"})
except Exception as exc: # noqa: BLE001
result["status"] = "failed"
result["operations"].append(
{"type": "ensure_wrapper", "status": "failed", "error": str(exc)}
)
write_log(
{
"level": "ERROR",
"message": f"Module {name} failed on ensure_wrapper: {exc}",
},
ctx,
)
raise
for op in cfg.get("operations", []):
op_type = op.get("type")
try:
@@ -1081,8 +1100,13 @@ def op_run_command(op: Dict[str, Any], ctx: Dict[str, Any]) -> None:
for key, value in op.get("env", {}).items():
env[key] = value.replace("${install_dir}", str(ctx["install_dir"]))
command = op.get("command", "")
if sys.platform == "win32" and command.strip() == "bash install.sh":
raw_command = str(op.get("command", "")).strip()
if raw_command == "bash install.sh" and ctx.get("_wrapper_installed"):
write_log({"level": "INFO", "message": "Skip wrapper install; already installed in this run"}, ctx)
return
command = raw_command
if sys.platform == "win32" and raw_command == "bash install.sh":
command = "cmd /c install.bat"
# Stream output in real-time while capturing for logging
@@ -1156,6 +1180,22 @@ def op_run_command(op: Dict[str, Any], ctx: Dict[str, Any]) -> None:
if process.returncode != 0:
raise RuntimeError(f"Command failed with code {process.returncode}: {command}")
if raw_command == "bash install.sh":
ctx["_wrapper_installed"] = True
def ensure_wrapper_installed(ctx: Dict[str, Any]) -> None:
    """Install codeagent-wrapper at most once per run.

    A no-op when ``ctx["_wrapper_installed"]`` is already truthy; otherwise
    delegates to ``op_run_command`` with the canonical ``bash install.sh``
    operation, which records the flag on success.
    """
    if ctx.get("_wrapper_installed"):
        return
    install_op = {
        "type": "run_command",
        "command": "bash install.sh",
        "env": {"INSTALL_DIR": "${install_dir}"},
    }
    op_run_command(install_op, ctx)
def write_log(entry: Dict[str, Any], ctx: Dict[str, Any]) -> None:
log_path = Path(ctx["log_file"])

View File

@@ -1,334 +0,0 @@
---
name: codex
description: Execute Codex CLI for code analysis, refactoring, and automated code changes. Use when you need to delegate complex code tasks to Codex AI with file references (@syntax) and structured output.
---
# Codex CLI Integration
## Overview
Execute Codex CLI commands and parse structured JSON responses. Supports file references via `@` syntax, multiple models, and sandbox controls.
## When to Use
- Complex code analysis requiring deep understanding
- Large-scale refactoring across multiple files
- Automated code generation with safety controls
## Fallback Policy
Codex is the **primary execution method** for all code edits and tests. Direct execution is only permitted when:
1. Codex is unavailable (service down, network issues)
2. Codex fails **twice consecutively** on the same task
When falling back to direct execution:
- Log `CODEX_FALLBACK` with the reason
- Retry Codex on the next task (don't permanently switch)
- Document the fallback in the final summary
## Usage
**Mandatory**: Run every automated invocation through the Bash tool in the foreground with **HEREDOC syntax** to avoid shell quoting issues, keeping the `timeout` parameter fixed at `7200000` milliseconds (do not change it or use any other entry point).
```bash
codex-wrapper - [working_dir] <<'EOF'
<task content here>
EOF
```
**Why HEREDOC?** Tasks often contain code blocks, nested quotes, shell metacharacters (`$`, `` ` ``, `\`), and multiline text. HEREDOC (Here Document) syntax passes these safely without shell interpretation, eliminating quote-escaping nightmares.
**Foreground only (no background/BashOutput)**: Never set `background: true`, never accept Claude's "Running in the background" mode, and avoid `BashOutput` streaming loops. Keep a single foreground Bash call per Codex task; if work might be long, split it into smaller foreground runs instead of offloading to background execution.
**Simple tasks** (backward compatibility):
For simple single-line tasks without special characters, you can still use direct quoting:
```bash
codex-wrapper "simple task here" [working_dir]
```
**Resume a session with HEREDOC:**
```bash
codex-wrapper resume <session_id> - [working_dir] <<'EOF'
<task content>
EOF
```
**Cross-platform notes:**
- **Bash/Zsh**: Use `<<'EOF'` (single quotes prevent variable expansion)
- **PowerShell 5.1+**: Use `@'` and `'@` (here-string syntax)
```powershell
codex-wrapper - @'
task content
'@
```
## Environment Variables
- **CODEX_TIMEOUT**: Override timeout in milliseconds (default: 7200000 = 2 hours)
- Example: `export CODEX_TIMEOUT=3600000` for 1 hour
## Timeout Control
- **Built-in**: Binary enforces 2-hour timeout by default
- **Override**: Set `CODEX_TIMEOUT` environment variable (in milliseconds, e.g., `CODEX_TIMEOUT=3600000` for 1 hour)
- **Behavior**: On timeout, sends SIGTERM, then SIGKILL after 5s if process doesn't exit
- **Exit code**: Returns 124 on timeout (consistent with GNU timeout)
- **Bash tool**: Always set `timeout: 7200000` parameter for double protection
### Parameters
- `task` (required): Task description, supports `@file` references
- `working_dir` (optional): Working directory (default: current)
### Return Format
Extracts `agent_message` from Codex JSON stream and appends session ID:
```
Agent response text here...
---
SESSION_ID: 019a7247-ac9d-71f3-89e2-a823dbd8fd14
```
Error format (stderr):
```
ERROR: Error message
```
Return only the final agent message and session ID—do not paste raw `BashOutput` logs or background-task chatter into the conversation.
### Invocation Pattern
All automated executions must use HEREDOC syntax through the Bash tool in the foreground, with `timeout` fixed at `7200000` (non-negotiable):
```
Bash tool parameters:
- command: codex-wrapper - [working_dir] <<'EOF'
<task content>
EOF
- timeout: 7200000
- description: <brief description of the task>
```
Run every call in the foreground—never append `&` to background it—so logs and errors stay visible for timely interruption or diagnosis.
**Important:** Use HEREDOC (`<<'EOF'`) for all but the simplest tasks. This prevents shell interpretation of quotes, variables, and special characters.
### Examples
**Basic code analysis:**
```bash
# Recommended: with HEREDOC (handles any special characters)
codex-wrapper - <<'EOF'
explain @src/main.ts
EOF
# timeout: 7200000
# Alternative: simple direct quoting (if task is simple)
codex-wrapper "explain @src/main.ts"
```
**Refactoring with multiline instructions:**
```bash
codex-wrapper - <<'EOF'
refactor @src/utils for performance:
- Extract duplicate code into helpers
- Use memoization for expensive calculations
- Add inline comments for non-obvious logic
EOF
# timeout: 7200000
```
**Multi-file analysis:**
```bash
codex-wrapper - "/path/to/project" <<'EOF'
analyze @. and find security issues:
1. Check for SQL injection vulnerabilities
2. Identify XSS risks in templates
3. Review authentication/authorization logic
4. Flag hardcoded credentials or secrets
EOF
# timeout: 7200000
```
**Resume previous session:**
```bash
# First session
codex-wrapper - <<'EOF'
add comments to @utils.js explaining the caching logic
EOF
# Output includes: SESSION_ID: 019a7247-ac9d-71f3-89e2-a823dbd8fd14
# Continue the conversation with more context
codex-wrapper resume 019a7247-ac9d-71f3-89e2-a823dbd8fd14 - <<'EOF'
now add TypeScript type hints and handle edge cases where cache is null
EOF
# timeout: 7200000
```
**Task with code snippets and special characters:**
```bash
codex-wrapper - <<'EOF'
Fix the bug in @app.js where the regex /\d+/ doesn't match "123"
The current code is:
const re = /\d+/;
if (re.test(input)) { ... }
Add proper escaping and handle $variables correctly.
EOF
```
### Parallel Execution
> Important:
> - `--parallel` only reads task definitions from stdin.
> - It does not accept extra command-line arguments (no inline `workdir`, `task`, or other params).
> - Put all task metadata and content in stdin; nothing belongs after `--parallel` on the command line.
**Correct vs Incorrect Usage**
**Correct:**
```bash
# Option 1: file redirection
codex-wrapper --parallel < tasks.txt
# Option 2: heredoc (recommended for multiple tasks)
codex-wrapper --parallel <<'EOF'
---TASK---
id: task1
workdir: /path/to/dir
---CONTENT---
task content
EOF
# Option 3: pipe
echo "---TASK---..." | codex-wrapper --parallel
```
**Incorrect (will trigger shell parsing errors):**
```bash
# Bad: no extra args allowed after --parallel
codex-wrapper --parallel - /path/to/dir <<'EOF'
...
EOF
# Bad: --parallel does not take a task argument
codex-wrapper --parallel "task description"
# Bad: workdir must live inside the task config
codex-wrapper --parallel /path/to/dir < tasks.txt
```
For multiple independent or dependent tasks, use `--parallel` mode with delimiter format:
**Typical Workflow (analyze → implement → test, chained in a single parallel call)**:
```bash
codex-wrapper --parallel <<'EOF'
---TASK---
id: analyze_1732876800
workdir: /home/user/project
---CONTENT---
analyze @spec.md and summarize API and UI requirements
---TASK---
id: implement_1732876801
workdir: /home/user/project
dependencies: analyze_1732876800
---CONTENT---
implement features from analyze_1732876800 summary in backend @services and frontend @ui
---TASK---
id: test_1732876802
workdir: /home/user/project
dependencies: implement_1732876801
---CONTENT---
add and run regression tests covering the new endpoints and UI flows
EOF
```
A single `codex-wrapper --parallel` call schedules all three stages concurrently, using `dependencies` to enforce sequential ordering without multiple invocations.
```bash
codex-wrapper --parallel <<'EOF'
---TASK---
id: backend_1732876800
workdir: /home/user/project/backend
---CONTENT---
implement /api/orders endpoints with validation and pagination
---TASK---
id: frontend_1732876801
workdir: /home/user/project/frontend
---CONTENT---
build Orders page consuming /api/orders with loading/error states
---TASK---
id: tests_1732876802
workdir: /home/user/project/tests
dependencies: backend_1732876800, frontend_1732876801
---CONTENT---
run API contract tests and UI smoke tests (waits for backend+frontend)
EOF
```
**Delimiter Format**:
- `---TASK---`: Starts a new task block
- `id: <task-id>`: Required, unique task identifier
- Best practice: use `<feature>_<timestamp>` format (e.g., `auth_1732876800`, `api_test_1732876801`)
- Ensures uniqueness across runs and makes tasks traceable
- `workdir: <path>`: Optional, working directory (default: `.`)
- Best practice: use absolute paths (e.g., `/home/user/project/backend`)
- Avoids ambiguity and ensures consistent behavior across environments
- Must be specified inside each task block; do not pass `workdir` as a CLI argument to `--parallel`
- Each task can set its own `workdir` when different directories are needed
- `dependencies: <id1>, <id2>`: Optional, comma-separated task IDs
- `session_id: <uuid>`: Optional, resume a previous session
- `---CONTENT---`: Separates metadata from task content
- Task content: Any text, code, special characters (no escaping needed)
**Dependencies Best Practices**
- Avoid multiple invocations: Place "analyze then implement" in a single `codex-wrapper --parallel` call, chaining them via `dependencies`, rather than running analysis first and then launching implementation separately.
- Naming convention: Use `<action>_<timestamp>` format (e.g., `analyze_1732876800`, `implement_1732876801`), where action names map to features/stages and timestamps ensure uniqueness and sortability.
- Dependency chain design: Keep chains short; only add dependencies for tasks that truly require ordering, let others run in parallel, avoiding over-serialization that reduces throughput.
**Resume Failed Tasks**:
```bash
# Use session_id from previous output to resume
codex-wrapper --parallel <<'EOF'
---TASK---
id: T2
session_id: 019xxx-previous-session-id
---CONTENT---
fix the previous error and retry
EOF
```
**Output**: Human-readable text format
```
=== Parallel Execution Summary ===
Total: 3 | Success: 2 | Failed: 1
--- Task: T1 ---
Status: SUCCESS
Session: 019xxx
Task output message...
--- Task: T2 ---
Status: FAILED (exit code 1)
Error: some error message
```
**Features**:
- Automatic topological sorting based on dependencies
- Unlimited concurrency for independent tasks
- Error isolation (failed tasks don't stop others)
- Dependency blocking (dependent tasks skip if parent fails)
## Notes
- **Binary distribution**: Single Go binary, zero dependencies
- **Installation**: Download from GitHub Releases or use install.sh
- **Cross-platform compatible**: Linux (amd64/arm64), macOS (amd64/arm64)
- All automated runs must use the Bash tool with the fixed timeout to provide dual timeout protection and unified logging/exit semantics for automation (new sessions only)
- Uses `--skip-git-repo-check` to work in any directory
- Streams progress, returns only final agent message
- Every execution returns a session ID for resuming conversations
- Requires Codex CLI installed and authenticated

View File

@@ -1,120 +0,0 @@
---
name: gemini
description: Execute Gemini CLI for AI-powered code analysis and generation. Use when you need to leverage Google's Gemini models for complex reasoning tasks.
---
# Gemini CLI Integration
## Overview
Execute Gemini CLI commands with support for multiple models and flexible prompt input. Integrates Google's Gemini AI models into Claude Code workflows.
## When to Use
- Complex reasoning tasks requiring advanced AI capabilities
- Code generation and analysis with Gemini models
- Tasks requiring Google's latest AI technology
- Alternative perspective on code problems
## Usage
**Mandatory**: Run via uv with fixed timeout 7200000ms (foreground):
```bash
uv run ~/.claude/skills/gemini/scripts/gemini.py "<prompt>" [working_dir]
```
**Optional** (direct execution or using Python):
```bash
~/.claude/skills/gemini/scripts/gemini.py "<prompt>" [working_dir]
# or
python3 ~/.claude/skills/gemini/scripts/gemini.py "<prompt>" [working_dir]
```
## Environment Variables
- **GEMINI_MODEL**: Configure model (default: `gemini-3-pro-preview`)
- Example: `export GEMINI_MODEL=gemini-3`
## Timeout Control
- **Fixed**: 7200000 milliseconds (2 hours), immutable
- **Bash tool**: Always set `timeout: 7200000` for double protection
### Parameters
- `prompt` (required): Task prompt or question
- `working_dir` (optional): Working directory (default: current directory)
### Return Format
Plain text output from Gemini:
```text
Model response text here...
```
Error format (stderr):
```text
ERROR: Error message
```
### Invocation Pattern
When calling via Bash tool, always include the timeout parameter:
```yaml
Bash tool parameters:
- command: uv run ~/.claude/skills/gemini/scripts/gemini.py "<prompt>"
- timeout: 7200000
- description: <brief description of the task>
```
Alternatives:
```yaml
# Direct execution (simplest)
- command: ~/.claude/skills/gemini/scripts/gemini.py "<prompt>"
# Using python3
- command: python3 ~/.claude/skills/gemini/scripts/gemini.py "<prompt>"
```
### Examples
**Basic query:**
```bash
uv run ~/.claude/skills/gemini/scripts/gemini.py "explain quantum computing"
# timeout: 7200000
```
**Code analysis:**
```bash
uv run ~/.claude/skills/gemini/scripts/gemini.py "review this code for security issues: $(cat app.py)"
# timeout: 7200000
```
**With specific working directory:**
```bash
uv run ~/.claude/skills/gemini/scripts/gemini.py "analyze project structure" "/path/to/project"
# timeout: 7200000
```
**Using python3 directly (alternative):**
```bash
python3 ~/.claude/skills/gemini/scripts/gemini.py "your prompt here"
```
## Notes
- **Recommended**: Use `uv run` for automatic Python environment management (requires uv installed)
- **Alternative**: Direct execution `./gemini.py` (uses system Python via shebang)
- Python implementation using standard library (zero dependencies)
- Cross-platform compatible (Windows/macOS/Linux)
- PEP 723 compliant (inline script metadata)
- Requires Gemini CLI installed and authenticated
- Supports all Gemini model variants (configure via `GEMINI_MODEL` environment variable)
- Output is streamed directly from Gemini CLI

View File

@@ -1,140 +0,0 @@
#!/usr/bin/env python3
# /// script
# requires-python = ">=3.8"
# dependencies = []
# ///
"""
Gemini CLI wrapper with cross-platform support.
Usage:
uv run gemini.py "<prompt>" [workdir]
python3 gemini.py "<prompt>"
./gemini.py "your prompt"
"""
import subprocess
import sys
import os
DEFAULT_MODEL = os.environ.get('GEMINI_MODEL', 'gemini-3-pro-preview')
DEFAULT_WORKDIR = '.'
TIMEOUT_MS = 7_200_000 # 固定 2 小时,毫秒
DEFAULT_TIMEOUT = TIMEOUT_MS // 1000
FORCE_KILL_DELAY = 5
def log_error(message: str):
    """Write an ERROR-prefixed line to stderr."""
    print(f"ERROR: {message}", file=sys.stderr)
def log_warn(message: str):
    """Write a WARN-prefixed line to stderr."""
    print(f"WARN: {message}", file=sys.stderr)
def log_info(message: str):
    """Write an INFO-prefixed line to stderr (keeps stdout clean for output)."""
    print(f"INFO: {message}", file=sys.stderr)
def parse_args():
    """Parse positional CLI arguments.

    Returns a dict with ``prompt`` (required first argument) and ``workdir``
    (optional second argument, defaulting to ``DEFAULT_WORKDIR``). Exits with
    status 1 when no prompt is supplied.
    """
    argv = sys.argv[1:]
    if not argv:
        log_error('Prompt required')
        sys.exit(1)
    workdir = argv[1] if len(argv) > 1 else DEFAULT_WORKDIR
    return {'prompt': argv[0], 'workdir': workdir}
def build_gemini_args(args) -> list:
    """Assemble the gemini CLI argv: fixed model flag plus the user prompt."""
    cmd = ['gemini']
    cmd += ['-m', DEFAULT_MODEL]
    cmd += ['-p', args['prompt']]
    return cmd
def main():
    """Run the gemini CLI with the parsed prompt, streaming stdout through.

    Exit codes: gemini's own non-zero status is propagated; 124 on timeout
    (consistent with GNU timeout), 127 when the gemini binary is missing,
    130 on KeyboardInterrupt, 1 for a bad working directory.
    """
    log_info('Script started')
    args = parse_args()
    log_info(f"Prompt length: {len(args['prompt'])}")
    log_info(f"Working dir: {args['workdir']}")
    gemini_args = build_gemini_args(args)
    timeout_sec = DEFAULT_TIMEOUT
    log_info(f"Timeout: {timeout_sec}s")
    # Switch into the requested working directory, if one was given.
    if args['workdir'] != DEFAULT_WORKDIR:
        try:
            os.chdir(args['workdir'])
        except FileNotFoundError:
            log_error(f"Working directory not found: {args['workdir']}")
            sys.exit(1)
        except PermissionError:
            log_error(f"Permission denied: {args['workdir']}")
            sys.exit(1)
        log_info('Changed working directory')
    try:
        log_info(f"Starting gemini with model {DEFAULT_MODEL}")
        process = None
        # Spawn the gemini subprocess, capturing stdout and stderr pipes.
        process = subprocess.Popen(
            gemini_args,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            text=True,
            bufsize=1  # line-buffered
        )
        # Relay stdout line by line in real time.
        for line in process.stdout:
            sys.stdout.write(line)
            sys.stdout.flush()
        # Wait for the process to finish (bounded by the fixed timeout).
        returncode = process.wait(timeout=timeout_sec)
        # Drain and forward stderr only after exit.
        stderr_output = process.stderr.read()
        if stderr_output:
            sys.stderr.write(stderr_output)
        # Propagate a non-zero exit status.
        if returncode != 0:
            log_error(f'Gemini exited with status {returncode}')
            sys.exit(returncode)
        sys.exit(0)
    except subprocess.TimeoutExpired:
        log_error(f'Gemini execution timeout ({timeout_sec}s)')
        if process is not None:
            process.kill()
            try:
                process.wait(timeout=FORCE_KILL_DELAY)
            except subprocess.TimeoutExpired:
                pass  # best-effort cleanup; give up after FORCE_KILL_DELAY
        sys.exit(124)
    except FileNotFoundError:
        log_error("gemini command not found in PATH")
        log_error("Please install Gemini CLI: https://github.com/google/generative-ai-python")
        sys.exit(127)
    except KeyboardInterrupt:
        # Graceful stop on Ctrl-C: terminate first, then kill if it lingers.
        if process is not None:
            process.terminate()
            try:
                process.wait(timeout=FORCE_KILL_DELAY)
            except subprocess.TimeoutExpired:
                process.kill()
        sys.exit(130)
if __name__ == '__main__':
main()