Mirror of https://github.com/cexll/myclaude.git (synced 2026-02-05 02:30:26 +08:00)

Compare commits (11 commits)
| Author | SHA1 | Date |
|---|---|---|
| | cd3115446d | |
| | 2b8bfd714c | |
| | 71485558df | |
| | b711b44c0e | |
| | eda2475543 | |
| | 2c0553794a | |
| | c96193fca6 | |
| | e2cd5be812 | |
| | 3dfa447f10 | |
| | e9a8013c6f | |
| | 3d76d46336 | |
.github/workflows/ci.yml (vendored, 7 changed lines)

@@ -8,7 +8,10 @@ on:
jobs:
  test:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        os: [ubuntu-latest, windows-latest, macos-latest]
    runs-on: ${{ matrix.os }}
    steps:
      - uses: actions/checkout@v4

@@ -21,11 +24,13 @@ jobs:
        run: |
          cd codeagent-wrapper
          go test -v -cover -coverprofile=coverage.out ./...
        shell: bash

      - name: Check coverage
        run: |
          cd codeagent-wrapper
          go tool cover -func=coverage.out | grep total | awk '{print $3}'
        shell: bash

      - name: Upload coverage
        uses: codecov/codecov-action@v4
.gitignore (vendored, 1 changed line)

@@ -8,3 +8,4 @@ __pycache__
.coverage
coverage.out
references
output/
@@ -36,10 +36,15 @@ npx github:cexll/myclaude

# List installable items (modules / skills / wrapper)
npx github:cexll/myclaude --list

# Detect installed modules and update from GitHub
npx github:cexll/myclaude --update

# Custom install directory / overwrite
npx github:cexll/myclaude --install-dir ~/.claude --force
```

`--update` detects already installed modules in the target install dir (defaults to `~/.claude`, read from `installed_modules.json` when present) and updates them from GitHub's latest release by overwriting the module files.
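For illustration, the status file that `--update` consults has roughly the shape below. This is a hypothetical sketch: the field names follow what `bin/cli.js` writes via `upsertModuleStatus()`, while the module name, the operation list, and the timestamps are placeholders.

```js
// Hypothetical contents of ~/.claude/installed_modules.json, as maintained by bin/cli.js.
// The "bmad" module name, the single operation, and the timestamps are illustrative only.
const installedStatus = {
  modules: {
    bmad: {
      module: "bmad",
      status: "success",
      operations: [{ type: "copy_dir", status: "success" }],
      installed_at: "2026-02-01T00:00:00.000Z",
    },
  },
  updated_at: "2026-02-01T00:00:00.000Z",
};
```

`--update` refreshes the modules listed under `modules`; when the file is absent, installed modules are detected from the files on disk instead.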
### Module Configuration

Edit `config.json` to enable/disable modules:
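The full `config.json` schema is not shown in this excerpt; as a rough sketch based on the operations `bin/cli.js` handles (`copy_file`, `copy_dir`, `merge_dir`, and `run_command` limited to `bash install.sh`), a module entry might look like the following, with the module name, description, and paths as placeholders:

```js
// Hypothetical config.json module entry; the name, description, and paths are placeholders.
// The operation types mirror those handled by applyModule() in bin/cli.js.
const config = {
  modules: {
    bmad: {
      description: "Example module",
      operations: [
        { type: "copy_dir", source: "bmad", target: "bmad" },
        { type: "merge_dir", source: "shared" },
      ],
    },
  },
};
```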
@@ -174,10 +174,15 @@ npx github:cexll/myclaude

# List installable items (module:* / skill:* / codeagent-wrapper)
npx github:cexll/myclaude --list

# Detect installed modules and update them from GitHub
npx github:cexll/myclaude --update

# Specify the install directory / force overwrite
npx github:cexll/myclaude --install-dir ~/.claude --force
```

`--update` detects installed modules in the target install dir (default `~/.claude`, preferring `installed_modules.json` when it exists) and overwrites them with the latest release pulled from GitHub.

### Module Configuration

Edit `config.json` to enable/disable modules:
bin/cli.js (444 changed lines)
@@ -18,15 +18,24 @@ const API_HEADERS = {
|
||||
|
||||
function parseArgs(argv) {
|
||||
const out = {
|
||||
command: "install",
|
||||
installDir: "~/.claude",
|
||||
force: false,
|
||||
dryRun: false,
|
||||
list: false,
|
||||
update: false,
|
||||
tag: null,
|
||||
module: null,
|
||||
yes: false,
|
||||
};
|
||||
|
||||
for (let i = 0; i < argv.length; i++) {
|
||||
let i = 0;
|
||||
if (argv[i] && !argv[i].startsWith("-")) {
|
||||
out.command = argv[i];
|
||||
i++;
|
||||
}
|
||||
|
||||
for (; i < argv.length; i++) {
|
||||
const a = argv[i];
|
||||
if (a === "--install-dir") out.installDir = argv[++i];
|
||||
else if (a === "--force") out.force = true;
|
||||
@@ -34,6 +43,8 @@ function parseArgs(argv) {
|
||||
else if (a === "--list") out.list = true;
|
||||
else if (a === "--update") out.update = true;
|
||||
else if (a === "--tag") out.tag = argv[++i];
|
||||
else if (a === "--module") out.module = argv[++i];
|
||||
else if (a === "-y" || a === "--yes") out.yes = true;
|
||||
else if (a === "-h" || a === "--help") out.help = true;
|
||||
else throw new Error(`Unknown arg: ${a}`);
|
||||
}
|
||||
@@ -51,6 +62,8 @@ function printHelp() {
|
||||
" npx github:cexll/myclaude --list",
|
||||
" npx github:cexll/myclaude --update",
|
||||
" npx github:cexll/myclaude --install-dir ~/.claude --force",
|
||||
" npx github:cexll/myclaude uninstall",
|
||||
" npx github:cexll/myclaude uninstall --module bmad,do -y",
|
||||
"",
|
||||
"Options:",
|
||||
" --install-dir <path> Default: ~/.claude",
|
||||
@@ -59,6 +72,8 @@ function printHelp() {
|
||||
" --list List installable items and exit",
|
||||
" --update Update already installed modules",
|
||||
" --tag <tag> Install a specific GitHub tag",
|
||||
" --module <names> For uninstall: comma-separated module names",
|
||||
" -y, --yes For uninstall: skip confirmation prompt",
|
||||
].join("\n") + "\n"
|
||||
);
|
||||
}
|
||||
@@ -202,6 +217,187 @@ function readInstalledModuleNamesFromStatus(installDir) {
|
||||
}
|
||||
}
|
||||
|
||||
function loadInstalledStatus(installDir) {
|
||||
const p = path.join(installDir, "installed_modules.json");
|
||||
if (!fs.existsSync(p)) return { modules: {} };
|
||||
try {
|
||||
const json = JSON.parse(fs.readFileSync(p, "utf8"));
|
||||
const modules = json && json.modules;
|
||||
if (!modules || typeof modules !== "object" || Array.isArray(modules)) return { modules: {} };
|
||||
return { ...json, modules };
|
||||
} catch {
|
||||
return { modules: {} };
|
||||
}
|
||||
}
|
||||
|
||||
function saveInstalledStatus(installDir, status) {
|
||||
const p = path.join(installDir, "installed_modules.json");
|
||||
fs.mkdirSync(installDir, { recursive: true });
|
||||
fs.writeFileSync(p, JSON.stringify(status, null, 2) + "\n", "utf8");
|
||||
}
|
||||
|
||||
function upsertModuleStatus(installDir, moduleResult) {
|
||||
const status = loadInstalledStatus(installDir);
|
||||
status.modules = status.modules || {};
|
||||
status.modules[moduleResult.module] = moduleResult;
|
||||
status.updated_at = new Date().toISOString();
|
||||
saveInstalledStatus(installDir, status);
|
||||
}
|
||||
|
||||
function deleteModuleStatus(installDir, moduleName) {
|
||||
const status = loadInstalledStatus(installDir);
|
||||
if (status.modules && Object.prototype.hasOwnProperty.call(status.modules, moduleName)) {
|
||||
delete status.modules[moduleName];
|
||||
status.updated_at = new Date().toISOString();
|
||||
saveInstalledStatus(installDir, status);
|
||||
}
|
||||
}
|
||||
|
||||
function loadSettings(installDir) {
|
||||
const p = path.join(installDir, "settings.json");
|
||||
if (!fs.existsSync(p)) return {};
|
||||
try {
|
||||
return JSON.parse(fs.readFileSync(p, "utf8"));
|
||||
} catch {
|
||||
return {};
|
||||
}
|
||||
}
|
||||
|
||||
function saveSettings(installDir, settings) {
|
||||
const p = path.join(installDir, "settings.json");
|
||||
fs.mkdirSync(installDir, { recursive: true });
|
||||
fs.writeFileSync(p, JSON.stringify(settings, null, 2) + "\n", "utf8");
|
||||
}
|
||||
|
||||
function isPlainObject(x) {
|
||||
return !!x && typeof x === "object" && !Array.isArray(x);
|
||||
}
|
||||
|
||||
function deepEqual(a, b) {
|
||||
if (a === b) return true;
|
||||
if (Array.isArray(a) && Array.isArray(b)) {
|
||||
if (a.length !== b.length) return false;
|
||||
for (let i = 0; i < a.length; i++) if (!deepEqual(a[i], b[i])) return false;
|
||||
return true;
|
||||
}
|
||||
if (isPlainObject(a) && isPlainObject(b)) {
|
||||
const aKeys = Object.keys(a);
|
||||
const bKeys = Object.keys(b);
|
||||
if (aKeys.length !== bKeys.length) return false;
|
||||
for (const k of aKeys) {
|
||||
if (!Object.prototype.hasOwnProperty.call(b, k)) return false;
|
||||
if (!deepEqual(a[k], b[k])) return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
function hooksEqual(h1, h2) {
|
||||
if (!isPlainObject(h1) || !isPlainObject(h2)) return false;
|
||||
const a = { ...h1 };
|
||||
const b = { ...h2 };
|
||||
delete a.__module__;
|
||||
delete b.__module__;
|
||||
return deepEqual(a, b);
|
||||
}
|
||||
|
||||
function replaceHookVariables(obj, pluginRoot) {
|
||||
if (typeof obj === "string") return obj.replace(/\$\{CLAUDE_PLUGIN_ROOT\}/g, pluginRoot);
|
||||
if (Array.isArray(obj)) return obj.map((v) => replaceHookVariables(v, pluginRoot));
|
||||
if (isPlainObject(obj)) {
|
||||
const out = {};
|
||||
for (const [k, v] of Object.entries(obj)) out[k] = replaceHookVariables(v, pluginRoot);
|
||||
return out;
|
||||
}
|
||||
return obj;
|
||||
}
|
||||
|
||||
function mergeHooksToSettings(moduleName, hooksConfig, installDir, pluginRoot) {
|
||||
if (!hooksConfig || !isPlainObject(hooksConfig)) return false;
|
||||
const rawHooks = hooksConfig.hooks;
|
||||
if (!rawHooks || !isPlainObject(rawHooks)) return false;
|
||||
|
||||
const settings = loadSettings(installDir);
|
||||
if (!settings.hooks || !isPlainObject(settings.hooks)) settings.hooks = {};
|
||||
|
||||
const moduleHooks = pluginRoot ? replaceHookVariables(rawHooks, pluginRoot) : rawHooks;
|
||||
let modified = false;
|
||||
|
||||
for (const [hookType, hookEntries] of Object.entries(moduleHooks)) {
|
||||
if (!Array.isArray(hookEntries)) continue;
|
||||
if (!Array.isArray(settings.hooks[hookType])) settings.hooks[hookType] = [];
|
||||
|
||||
for (const entry of hookEntries) {
|
||||
if (!isPlainObject(entry)) continue;
|
||||
const entryCopy = { ...entry, __module__: moduleName };
|
||||
|
||||
let exists = false;
|
||||
for (const existing of settings.hooks[hookType]) {
|
||||
if (existing && existing.__module__ === moduleName && hooksEqual(existing, entryCopy)) {
|
||||
exists = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (!exists) {
|
||||
settings.hooks[hookType].push(entryCopy);
|
||||
modified = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (modified) saveSettings(installDir, settings);
|
||||
return modified;
|
||||
}
|
||||
|
||||
function unmergeHooksFromSettings(moduleName, installDir) {
|
||||
const settings = loadSettings(installDir);
|
||||
if (!settings.hooks || !isPlainObject(settings.hooks)) return false;
|
||||
|
||||
let modified = false;
|
||||
for (const hookType of Object.keys(settings.hooks)) {
|
||||
const entries = settings.hooks[hookType];
|
||||
if (!Array.isArray(entries)) continue;
|
||||
const kept = entries.filter((e) => !(e && e.__module__ === moduleName));
|
||||
if (kept.length !== entries.length) {
|
||||
settings.hooks[hookType] = kept;
|
||||
modified = true;
|
||||
}
|
||||
if (!settings.hooks[hookType].length) {
|
||||
delete settings.hooks[hookType];
|
||||
modified = true;
|
||||
}
|
||||
}
|
||||
|
||||
if (modified) saveSettings(installDir, settings);
|
||||
return modified;
|
||||
}
|
||||
|
||||
function mergeModuleHooks(moduleName, mod, installDir) {
|
||||
const ops = Array.isArray(mod && mod.operations) ? mod.operations : [];
|
||||
let merged = false;
|
||||
|
||||
for (const op of ops) {
|
||||
if (!op || op.type !== "copy_dir") continue;
|
||||
const target = typeof op.target === "string" ? op.target : "";
|
||||
if (!target) continue;
|
||||
|
||||
const targetDir = path.join(installDir, target);
|
||||
const hooksFile = path.join(targetDir, "hooks", "hooks.json");
|
||||
if (!fs.existsSync(hooksFile)) continue;
|
||||
|
||||
let hooksConfig;
|
||||
try {
|
||||
hooksConfig = JSON.parse(fs.readFileSync(hooksFile, "utf8"));
|
||||
} catch {
|
||||
continue;
|
||||
}
|
||||
if (mergeHooksToSettings(moduleName, hooksConfig, installDir, targetDir)) merged = true;
|
||||
}
|
||||
|
||||
return merged;
|
||||
}
|
||||
|
||||
async function dirExists(p) {
|
||||
try {
|
||||
return (await fs.promises.stat(p)).isDirectory();
|
||||
@@ -305,7 +501,8 @@ async function updateInstalledModules(installDir, tag, config, dryRun) {
|
||||
await fs.promises.mkdir(installDir, { recursive: true });
|
||||
for (const name of toUpdate) {
|
||||
process.stdout.write(`Updating module: ${name}\n`);
|
||||
await applyModule(name, config, repoRoot, installDir, true);
|
||||
const r = await applyModule(name, config, repoRoot, installDir, true);
|
||||
upsertModuleStatus(installDir, r);
|
||||
}
|
||||
} finally {
|
||||
if (tmp) await rmTree(tmp);
|
||||
@@ -513,11 +710,12 @@ async function extractTarGz(archivePath, destDir) {
|
||||
}
|
||||
|
||||
async function copyFile(src, dst, force) {
|
||||
if (!force && fs.existsSync(dst)) return;
|
||||
if (!force && fs.existsSync(dst)) return false;
|
||||
await fs.promises.mkdir(path.dirname(dst), { recursive: true });
|
||||
await fs.promises.copyFile(src, dst);
|
||||
const st = await fs.promises.stat(src);
|
||||
await fs.promises.chmod(dst, st.mode);
|
||||
return true;
|
||||
}
|
||||
|
||||
async function copyDirRecursive(src, dst, force) {
|
||||
@@ -534,6 +732,7 @@ async function copyDirRecursive(src, dst, force) {
|
||||
}
|
||||
|
||||
async function mergeDir(src, installDir, force) {
|
||||
const installed = [];
|
||||
const subdirs = await fs.promises.readdir(src, { withFileTypes: true });
|
||||
for (const d of subdirs) {
|
||||
if (!d.isDirectory()) continue;
|
||||
@@ -543,9 +742,11 @@ async function mergeDir(src, installDir, force) {
|
||||
const entries = await fs.promises.readdir(srcSub, { withFileTypes: true });
|
||||
for (const e of entries) {
|
||||
if (!e.isFile()) continue;
|
||||
await copyFile(path.join(srcSub, e.name), path.join(dstSub, e.name), force);
|
||||
const didCopy = await copyFile(path.join(srcSub, e.name), path.join(dstSub, e.name), force);
|
||||
if (didCopy) installed.push(`${d.name}/${e.name}`);
|
||||
}
|
||||
}
|
||||
return installed;
|
||||
}
|
||||
|
||||
function runInstallSh(repoRoot, installDir) {
|
||||
@@ -577,33 +778,154 @@ async function applyModule(moduleName, config, repoRoot, installDir, force) {
|
||||
const mod = config && config.modules && config.modules[moduleName];
|
||||
if (!mod) throw new Error(`Unknown module: ${moduleName}`);
|
||||
const ops = Array.isArray(mod.operations) ? mod.operations : [];
|
||||
const result = {
|
||||
module: moduleName,
|
||||
status: "success",
|
||||
operations: [],
|
||||
installed_at: new Date().toISOString(),
|
||||
};
|
||||
const mergeDirFiles = [];
|
||||
|
||||
for (const op of ops) {
|
||||
const type = op && op.type;
|
||||
if (type === "copy_file") {
|
||||
await copyFile(
|
||||
path.join(repoRoot, op.source),
|
||||
path.join(installDir, op.target),
|
||||
force
|
||||
);
|
||||
} else if (type === "copy_dir") {
|
||||
await copyDirRecursive(
|
||||
path.join(repoRoot, op.source),
|
||||
path.join(installDir, op.target),
|
||||
force
|
||||
);
|
||||
} else if (type === "merge_dir") {
|
||||
await mergeDir(path.join(repoRoot, op.source), installDir, force);
|
||||
} else if (type === "run_command") {
|
||||
const cmd = typeof op.command === "string" ? op.command.trim() : "";
|
||||
if (cmd !== "bash install.sh") {
|
||||
throw new Error(`Refusing run_command: ${cmd || "(empty)"}`);
|
||||
try {
|
||||
if (type === "copy_file") {
|
||||
await copyFile(path.join(repoRoot, op.source), path.join(installDir, op.target), force);
|
||||
} else if (type === "copy_dir") {
|
||||
await copyDirRecursive(path.join(repoRoot, op.source), path.join(installDir, op.target), force);
|
||||
} else if (type === "merge_dir") {
|
||||
mergeDirFiles.push(...(await mergeDir(path.join(repoRoot, op.source), installDir, force)));
|
||||
} else if (type === "run_command") {
|
||||
const cmd = typeof op.command === "string" ? op.command.trim() : "";
|
||||
if (cmd !== "bash install.sh") {
|
||||
throw new Error(`Refusing run_command: ${cmd || "(empty)"}`);
|
||||
}
|
||||
await runInstallSh(repoRoot, installDir);
|
||||
} else {
|
||||
throw new Error(`Unsupported operation type: ${type}`);
|
||||
}
|
||||
await runInstallSh(repoRoot, installDir);
|
||||
} else {
|
||||
throw new Error(`Unsupported operation type: ${type}`);
|
||||
result.operations.push({ type, status: "success" });
|
||||
} catch (err) {
|
||||
result.status = "failed";
|
||||
result.operations.push({
|
||||
type,
|
||||
status: "failed",
|
||||
error: err && err.message ? err.message : String(err),
|
||||
});
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
|
||||
if (mergeDirFiles.length) result.merge_dir_files = mergeDirFiles;
|
||||
|
||||
try {
|
||||
if (mergeModuleHooks(moduleName, mod, installDir)) {
|
||||
result.has_hooks = true;
|
||||
result.operations.push({ type: "merge_hooks", status: "success" });
|
||||
}
|
||||
} catch (err) {
|
||||
result.operations.push({
|
||||
type: "merge_hooks",
|
||||
status: "failed",
|
||||
error: err && err.message ? err.message : String(err),
|
||||
});
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
async function tryRemoveEmptyDir(p) {
|
||||
try {
|
||||
const entries = await fs.promises.readdir(p);
|
||||
if (!entries.length) await fs.promises.rmdir(p);
|
||||
} catch {
|
||||
// ignore
|
||||
}
|
||||
}
|
||||
|
||||
async function removePathIfExists(p) {
|
||||
if (!fs.existsSync(p)) return;
|
||||
const st = await fs.promises.lstat(p);
|
||||
if (st.isDirectory()) {
|
||||
await rmTree(p);
|
||||
return;
|
||||
}
|
||||
try {
|
||||
await fs.promises.unlink(p);
|
||||
} catch (err) {
|
||||
if (!err || err.code !== "ENOENT") throw err;
|
||||
}
|
||||
}
|
||||
|
||||
async function uninstallModule(moduleName, config, repoRoot, installDir, dryRun) {
|
||||
const mod = config && config.modules && config.modules[moduleName];
|
||||
if (!mod) throw new Error(`Unknown module: ${moduleName}`);
|
||||
const ops = Array.isArray(mod.operations) ? mod.operations : [];
|
||||
const status = loadInstalledStatus(installDir);
|
||||
const moduleStatus = (status.modules && status.modules[moduleName]) || {};
|
||||
const recordedMerge = Array.isArray(moduleStatus.merge_dir_files) ? moduleStatus.merge_dir_files : null;
|
||||
|
||||
for (const op of ops) {
|
||||
const type = op && op.type;
|
||||
if (type === "copy_file" || type === "copy_dir") {
|
||||
const target = typeof op.target === "string" ? op.target : "";
|
||||
if (!target) continue;
|
||||
const p = path.join(installDir, target);
|
||||
if (dryRun) process.stdout.write(`- remove ${p}\n`);
|
||||
else await removePathIfExists(p);
|
||||
continue;
|
||||
}
|
||||
|
||||
if (type !== "merge_dir") continue;
|
||||
const source = typeof op.source === "string" ? op.source : "";
|
||||
if (!source) continue;
|
||||
|
||||
if (recordedMerge && recordedMerge.length) {
|
||||
for (const rel of recordedMerge) {
|
||||
const parts = String(rel).split("/").filter(Boolean);
|
||||
if (parts.includes("..")) continue;
|
||||
const p = path.join(installDir, ...parts);
|
||||
if (dryRun) process.stdout.write(`- remove ${p}\n`);
|
||||
else {
|
||||
await removePathIfExists(p);
|
||||
await tryRemoveEmptyDir(path.dirname(p));
|
||||
}
|
||||
}
|
||||
continue;
|
||||
}
|
||||
|
||||
const srcDir = path.join(repoRoot, source);
|
||||
if (!(await dirExists(srcDir))) continue;
|
||||
const subdirs = await fs.promises.readdir(srcDir, { withFileTypes: true });
|
||||
for (const d of subdirs) {
|
||||
if (!d.isDirectory()) continue;
|
||||
const srcSub = path.join(srcDir, d.name);
|
||||
const entries = await fs.promises.readdir(srcSub, { withFileTypes: true });
|
||||
for (const e of entries) {
|
||||
if (!e.isFile()) continue;
|
||||
const dst = path.join(installDir, d.name, e.name);
|
||||
if (!fs.existsSync(dst)) continue;
|
||||
try {
|
||||
const [srcBuf, dstBuf] = await Promise.all([
|
||||
fs.promises.readFile(path.join(srcSub, e.name)),
|
||||
fs.promises.readFile(dst),
|
||||
]);
|
||||
if (Buffer.compare(srcBuf, dstBuf) !== 0) continue;
|
||||
} catch {
|
||||
continue;
|
||||
}
|
||||
if (dryRun) process.stdout.write(`- remove ${dst}\n`);
|
||||
else {
|
||||
await removePathIfExists(dst);
|
||||
await tryRemoveEmptyDir(path.dirname(dst));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (dryRun) return;
|
||||
unmergeHooksFromSettings(moduleName, installDir);
|
||||
deleteModuleStatus(installDir, moduleName);
|
||||
}
|
||||
|
||||
async function installSelected(picks, tag, config, installDir, force, dryRun) {
|
||||
@@ -647,7 +969,8 @@ async function installSelected(picks, tag, config, installDir, force, dryRun) {
|
||||
}
|
||||
if (p.kind === "module") {
|
||||
process.stdout.write(`Installing module: ${p.moduleName}\n`);
|
||||
await applyModule(p.moduleName, config, repoRoot, installDir, force);
|
||||
const r = await applyModule(p.moduleName, config, repoRoot, installDir, force);
|
||||
upsertModuleStatus(installDir, r);
|
||||
continue;
|
||||
}
|
||||
if (p.kind === "skill") {
|
||||
@@ -672,8 +995,77 @@ async function main() {
|
||||
}
|
||||
|
||||
const installDir = expandHome(args.installDir);
|
||||
if (args.command !== "install" && args.command !== "uninstall") {
|
||||
throw new Error(`Unknown command: ${args.command}`);
|
||||
}
|
||||
if (args.list && args.update) throw new Error("Cannot combine --list and --update");
|
||||
|
||||
if (args.command === "uninstall") {
|
||||
const config = readLocalConfig();
|
||||
const repoRoot = repoRootFromHere();
|
||||
const fromStatus = readInstalledModuleNamesFromStatus(installDir);
|
||||
const installed = fromStatus || (await detectInstalledModuleNames(config, repoRoot, installDir));
|
||||
const installedSet = new Set(installed);
|
||||
|
||||
let toRemove = [];
|
||||
if (args.module) {
|
||||
const v = String(args.module).trim();
|
||||
if (v.toLowerCase() === "all") {
|
||||
toRemove = installed;
|
||||
} else {
|
||||
toRemove = v
|
||||
.split(",")
|
||||
.map((s) => s.trim())
|
||||
.filter(Boolean);
|
||||
}
|
||||
} else {
|
||||
const modules = (config && config.modules) || {};
|
||||
const items = [];
|
||||
for (const [name, mod] of Object.entries(modules)) {
|
||||
if (!installedSet.has(name)) continue;
|
||||
const desc = mod && typeof mod.description === "string" ? mod.description : "";
|
||||
items.push({
|
||||
id: `module:${name}`,
|
||||
label: `module:${name}${desc ? ` - ${desc}` : ""}`,
|
||||
kind: "module",
|
||||
moduleName: name,
|
||||
});
|
||||
}
|
||||
if (!items.length) {
|
||||
process.stdout.write(`No installed modules found in ${installDir}.\n`);
|
||||
return;
|
||||
}
|
||||
const picks = await promptMultiSelect(items, "myclaude uninstall");
|
||||
toRemove = picks.map((p) => p.moduleName);
|
||||
}
|
||||
|
||||
toRemove = toRemove.filter((m) => installedSet.has(m));
|
||||
if (!toRemove.length) {
|
||||
process.stdout.write("Nothing selected.\n");
|
||||
return;
|
||||
}
|
||||
|
||||
if (!args.yes && !args.dryRun) {
|
||||
if (!process.stdin.isTTY) {
|
||||
throw new Error("No TTY. Use -y/--yes to skip confirmation.");
|
||||
}
|
||||
const rl = readline.createInterface({ input: process.stdin, output: process.stdout });
|
||||
const answer = await new Promise((resolve) => rl.question("Confirm uninstall? (y/N): ", resolve));
|
||||
rl.close();
|
||||
if (String(answer).trim().toLowerCase() !== "y") {
|
||||
process.stdout.write("Cancelled.\n");
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
for (const name of toRemove) {
|
||||
process.stdout.write(`Uninstalling module: ${name}\n`);
|
||||
await uninstallModule(name, config, repoRoot, installDir, args.dryRun);
|
||||
}
|
||||
process.stdout.write("Done.\n");
|
||||
return;
|
||||
}
|
||||
|
||||
let tag = args.tag;
|
||||
if (!tag) {
|
||||
try {
|
||||
|
||||
codeagent-wrapper/.github/workflows/ci.yml (vendored, 8 changed lines)

@@ -17,6 +17,9 @@ jobs:
        go-version: ["1.21", "1.22"]
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
          fetch-tags: true
      - uses: actions/setup-go@v5
        with:
          go-version: ${{ matrix.go-version }}

@@ -25,11 +28,16 @@ jobs:
        run: make test
      - name: Build
        run: make build
      - name: Verify version
        run: ./codeagent-wrapper --version

  lint:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
          fetch-tags: true
      - uses: actions/setup-go@v5
        with:
          go-version: "1.22"
@@ -1,4 +1,6 @@
GO ?= go
VERSION := $(shell git describe --tags --always --dirty 2>/dev/null || echo dev)
LDFLAGS := -ldflags "-X codeagent-wrapper/internal/app.version=$(VERSION)"

TOOLS_BIN := $(CURDIR)/bin
TOOLCHAIN ?= go1.22.0

@@ -11,8 +13,7 @@ STATICCHECK := $(TOOLS_BIN)/staticcheck
.PHONY: build test lint clean install

build:
	$(GO) build -o codeagent ./cmd/codeagent
	$(GO) build -o codeagent-wrapper ./cmd/codeagent-wrapper
	$(GO) build $(LDFLAGS) -o codeagent-wrapper ./cmd/codeagent-wrapper

test:
	$(GO) test ./...

@@ -33,5 +34,4 @@ clean:
	@python3 -c 'import glob, os; paths=["codeagent","codeagent.exe","codeagent-wrapper","codeagent-wrapper.exe","coverage.out","cover.out","coverage.html"]; paths += glob.glob("coverage*.out") + glob.glob("cover_*.out") + glob.glob("*.test"); [os.remove(p) for p in paths if os.path.exists(p)]'

install:
	$(GO) install ./cmd/codeagent
	$(GO) install ./cmd/codeagent-wrapper
	$(GO) install $(LDFLAGS) ./cmd/codeagent-wrapper
@@ -150,3 +150,8 @@ make test
make lint
make clean
```

## Troubleshooting

- On macOS, if you see temp-directory related `permission denied` errors (for example, a temporary executable cannot be run under `/var/folders/.../T`), set an executable temp directory: `CODEAGENT_TMPDIR=$HOME/.codeagent/tmp`.
- For the `claude` backend, `base_url`/`api_key` (from `~/.codeagent/models.json`) are injected into the child process environment as `ANTHROPIC_BASE_URL` / `ANTHROPIC_API_KEY`. If `base_url` points to a local proxy (e.g. `localhost:23001`), make sure the proxy process is running (see the sketch below).
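For reference, a minimal `~/.codeagent/models.json` for a `claude`-backed agent might look like the sketch below. The `agents`/`backend`/`model` fields follow the test fixture in this diff; placing `base_url`/`api_key` directly on the agent entry is an assumption, and the URL and key values are placeholders.

```js
// Hypothetical ~/.codeagent/models.json for a claude backend; all values are placeholders.
// base_url/api_key are injected into the child process as ANTHROPIC_BASE_URL / ANTHROPIC_API_KEY.
const modelsConfig = {
  agents: {
    develop: {
      backend: "claude",
      model: "claude-example",
      base_url: "http://localhost:23001",
      api_key: "sk-placeholder",
    },
  },
};
```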
@@ -9,8 +9,9 @@ import (
	"time"
)

var version = "dev"

const (
	version = "6.1.2"
	defaultWorkdir        = "."
	defaultTimeout        = 7200 // seconds (2 hours)
	defaultCoverageTarget = 90.0
@@ -3,6 +3,7 @@ package wrapper
|
||||
import (
|
||||
"bytes"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"testing"
|
||||
|
||||
config "codeagent-wrapper/internal/config"
|
||||
@@ -29,6 +30,18 @@ func BenchmarkConfigParse_ParseArgs(b *testing.B) {
|
||||
b.Setenv("HOME", home)
|
||||
b.Setenv("USERPROFILE", home)
|
||||
|
||||
configDir := filepath.Join(home, ".codeagent")
|
||||
if err := os.MkdirAll(configDir, 0o755); err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
if err := os.WriteFile(filepath.Join(configDir, "models.json"), []byte(`{
|
||||
"agents": {
|
||||
"develop": { "backend": "codex", "model": "gpt-test" }
|
||||
}
|
||||
}`), 0o644); err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
|
||||
config.ResetModelsConfigCacheForTest()
|
||||
b.Cleanup(config.ResetModelsConfigCacheForTest)
|
||||
|
||||
|
||||
@@ -168,6 +168,7 @@ func newCleanupCommand() *cobra.Command {
|
||||
}
|
||||
|
||||
func runWithLoggerAndCleanup(fn func() int) (exitCode int) {
|
||||
ensureExecutableTempDir()
|
||||
logger, err := NewLogger()
|
||||
if err != nil {
|
||||
fmt.Fprintf(os.Stderr, "ERROR: failed to initialize logger: %v\n", err)
|
||||
@@ -254,7 +255,11 @@ func buildSingleConfig(cmd *cobra.Command, args []string, rawArgv []string, opts
|
||||
var resolvedBackend, resolvedModel, resolvedPromptFile, resolvedReasoning string
|
||||
if agentName != "" {
|
||||
var resolvedYolo bool
|
||||
resolvedBackend, resolvedModel, resolvedPromptFile, resolvedReasoning, _, _, resolvedYolo = config.ResolveAgentConfig(agentName)
|
||||
var err error
|
||||
resolvedBackend, resolvedModel, resolvedPromptFile, resolvedReasoning, _, _, resolvedYolo, err = config.ResolveAgentConfig(agentName)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to resolve agent %q: %w", agentName, err)
|
||||
}
|
||||
yolo = resolvedYolo
|
||||
}
|
||||
|
||||
|
||||
@@ -567,8 +567,7 @@ func TestExecutorParallelLogIsolation(t *testing.T) {
|
||||
}
|
||||
|
||||
func TestConcurrentExecutorParallelLogIsolationAndClosure(t *testing.T) {
|
||||
tempDir := t.TempDir()
|
||||
t.Setenv("TMPDIR", tempDir)
|
||||
setTempDirEnv(t, t.TempDir())
|
||||
|
||||
oldArgs := os.Args
|
||||
os.Args = []string{wrapperName}
|
||||
@@ -929,8 +928,7 @@ func TestExecutorExecuteConcurrentWithContextBranches(t *testing.T) {
|
||||
t.Run("TestConcurrentTaskLoggerFailure", func(t *testing.T) {
|
||||
// Create a writable temp dir for the main logger, then flip TMPDIR to a read-only
|
||||
// location so task-specific loggers fail to open.
|
||||
writable := t.TempDir()
|
||||
t.Setenv("TMPDIR", writable)
|
||||
writable := setTempDirEnv(t, t.TempDir())
|
||||
|
||||
mainLogger, err := NewLoggerWithSuffix("shared-main")
|
||||
if err != nil {
|
||||
@@ -943,11 +941,11 @@ func TestExecutorExecuteConcurrentWithContextBranches(t *testing.T) {
|
||||
_ = os.Remove(mainLogger.Path())
|
||||
})
|
||||
|
||||
noWrite := filepath.Join(writable, "ro")
|
||||
if err := os.Mkdir(noWrite, 0o500); err != nil {
|
||||
t.Fatalf("failed to create read-only temp dir: %v", err)
|
||||
notDir := filepath.Join(writable, "not-a-dir")
|
||||
if err := os.WriteFile(notDir, []byte("x"), 0o644); err != nil {
|
||||
t.Fatalf("failed to create temp file: %v", err)
|
||||
}
|
||||
t.Setenv("TMPDIR", noWrite)
|
||||
setTempDirEnv(t, notDir)
|
||||
|
||||
taskA := nextExecutorTestTaskID("shared-a")
|
||||
taskB := nextExecutorTestTaskID("shared-b")
|
||||
@@ -1011,8 +1009,7 @@ func TestExecutorExecuteConcurrentWithContextBranches(t *testing.T) {
|
||||
})
|
||||
|
||||
t.Run("TestSanitizeTaskID", func(t *testing.T) {
|
||||
tempDir := t.TempDir()
|
||||
t.Setenv("TMPDIR", tempDir)
|
||||
setTempDirEnv(t, t.TempDir())
|
||||
|
||||
orig := runCodexTaskFn
|
||||
runCodexTaskFn = func(task TaskSpec, timeout int) TaskResult {
|
||||
@@ -1081,8 +1078,7 @@ func TestExecutorSharedLogFalseWhenCustomLogPath(t *testing.T) {
|
||||
_ = devNull.Close()
|
||||
})
|
||||
|
||||
tempDir := t.TempDir()
|
||||
t.Setenv("TMPDIR", tempDir)
|
||||
tempDir := setTempDirEnv(t, t.TempDir())
|
||||
|
||||
// Setup: create the main logger
|
||||
mainLogger, err := NewLoggerWithSuffix("shared-main")
|
||||
@@ -1098,11 +1094,11 @@ func TestExecutorSharedLogFalseWhenCustomLogPath(t *testing.T) {
|
||||
// Simulated scenario: task logger creation fails (via a read-only TMPDIR),
// so it falls back to the main logger (handle.shared=true),
// but runCodexTaskFn returns a custom LogPath (different from the main logger's path)
|
||||
roDir := filepath.Join(tempDir, "ro")
|
||||
if err := os.Mkdir(roDir, 0o500); err != nil {
|
||||
t.Fatalf("failed to create read-only dir: %v", err)
|
||||
notDir := filepath.Join(tempDir, "not-a-dir")
|
||||
if err := os.WriteFile(notDir, []byte("x"), 0o644); err != nil {
|
||||
t.Fatalf("failed to create temp file: %v", err)
|
||||
}
|
||||
t.Setenv("TMPDIR", roDir)
|
||||
setTempDirEnv(t, notDir)
|
||||
|
||||
orig := runCodexTaskFn
|
||||
customLogPath := "/custom/path/to.log"
|
||||
|
||||
@@ -550,10 +550,8 @@ func TestRunNonParallelOutputsIncludeLogPathsIntegration(t *testing.T) {
|
||||
os.Args = []string{"codeagent-wrapper", "integration-log-check"}
|
||||
stdinReader = strings.NewReader("")
|
||||
isTerminalFn = func() bool { return true }
|
||||
codexCommand = "echo"
|
||||
buildCodexArgsFn = func(cfg *Config, targetArg string) []string {
|
||||
return []string{`{"type":"thread.started","thread_id":"integration-session"}` + "\n" + `{"type":"item.completed","item":{"type":"agent_message","text":"done"}}`}
|
||||
}
|
||||
codexCommand = createFakeCodexScript(t, "integration-session", "done")
|
||||
buildCodexArgsFn = func(cfg *Config, targetArg string) []string { return []string{} }
|
||||
|
||||
var exitCode int
|
||||
stderr := captureStderr(t, func() {
|
||||
@@ -725,20 +723,18 @@ func TestRunConcurrentSpeedupBenchmark(t *testing.T) {
|
||||
layers := [][]TaskSpec{tasks}
|
||||
|
||||
serialStart := time.Now()
|
||||
for _, task := range tasks {
|
||||
_ = runCodexTaskFn(task, 5)
|
||||
}
|
||||
_ = executeConcurrentWithContext(nil, layers, 5, 1)
|
||||
serialElapsed := time.Since(serialStart)
|
||||
|
||||
concurrentStart := time.Now()
|
||||
_ = executeConcurrent(layers, 5)
|
||||
_ = executeConcurrentWithContext(nil, layers, 5, 0)
|
||||
concurrentElapsed := time.Since(concurrentStart)
|
||||
|
||||
if concurrentElapsed >= serialElapsed/5 {
|
||||
t.Fatalf("expected concurrent time <20%% of serial, serial=%v concurrent=%v", serialElapsed, concurrentElapsed)
|
||||
}
|
||||
ratio := float64(concurrentElapsed) / float64(serialElapsed)
|
||||
t.Logf("speedup ratio (concurrent/serial)=%.3f", ratio)
|
||||
if concurrentElapsed >= serialElapsed/2 {
|
||||
t.Fatalf("expected concurrent time <50%% of serial, serial=%v concurrent=%v", serialElapsed, concurrentElapsed)
|
||||
}
|
||||
}
|
||||
|
||||
func TestRunStartupCleanupRemovesOrphansEndToEnd(t *testing.T) {
|
||||
@@ -830,15 +826,20 @@ func TestRunCleanupFlagEndToEnd_Success(t *testing.T) {
|
||||
|
||||
tempDir := setTempDirEnv(t, t.TempDir())
|
||||
|
||||
staleA := createTempLog(t, tempDir, "codeagent-wrapper-2100.log")
|
||||
staleB := createTempLog(t, tempDir, "codeagent-wrapper-2200-extra.log")
|
||||
keeper := createTempLog(t, tempDir, "codeagent-wrapper-2300.log")
|
||||
basePID := os.Getpid()
|
||||
stalePID1 := basePID + 10000
|
||||
stalePID2 := basePID + 11000
|
||||
keeperPID := basePID + 12000
|
||||
|
||||
staleA := createTempLog(t, tempDir, fmt.Sprintf("codeagent-wrapper-%d.log", stalePID1))
|
||||
staleB := createTempLog(t, tempDir, fmt.Sprintf("codeagent-wrapper-%d-extra.log", stalePID2))
|
||||
keeper := createTempLog(t, tempDir, fmt.Sprintf("codeagent-wrapper-%d.log", keeperPID))
|
||||
|
||||
stubProcessRunning(t, func(pid int) bool {
|
||||
return pid == 2300 || pid == os.Getpid()
|
||||
return pid == keeperPID || pid == basePID
|
||||
})
|
||||
stubProcessStartTime(t, func(pid int) time.Time {
|
||||
if pid == 2300 || pid == os.Getpid() {
|
||||
if pid == keeperPID || pid == basePID {
|
||||
return time.Now().Add(-1 * time.Hour)
|
||||
}
|
||||
return time.Time{}
|
||||
@@ -868,10 +869,10 @@ func TestRunCleanupFlagEndToEnd_Success(t *testing.T) {
|
||||
if !strings.Contains(output, "Files kept: 1") {
|
||||
t.Fatalf("missing 'Files kept: 1' in output: %q", output)
|
||||
}
|
||||
if !strings.Contains(output, "codeagent-wrapper-2100.log") || !strings.Contains(output, "codeagent-wrapper-2200-extra.log") {
|
||||
if !strings.Contains(output, fmt.Sprintf("codeagent-wrapper-%d.log", stalePID1)) || !strings.Contains(output, fmt.Sprintf("codeagent-wrapper-%d-extra.log", stalePID2)) {
|
||||
t.Fatalf("missing deleted file names in output: %q", output)
|
||||
}
|
||||
if !strings.Contains(output, "codeagent-wrapper-2300.log") {
|
||||
if !strings.Contains(output, fmt.Sprintf("codeagent-wrapper-%d.log", keeperPID)) {
|
||||
t.Fatalf("missing kept file names in output: %q", output)
|
||||
}
|
||||
|
||||
|
||||
@@ -643,10 +643,24 @@ func (f *fakeCmd) StdinContents() string {
|
||||
|
||||
func createFakeCodexScript(t *testing.T, threadID, message string) string {
|
||||
t.Helper()
|
||||
scriptPath := filepath.Join(t.TempDir(), "codex.sh")
|
||||
tempDir := t.TempDir()
|
||||
|
||||
// Add small sleep to ensure parser goroutine has time to read stdout before
|
||||
// the process exits and closes the pipe. This prevents race conditions in CI
|
||||
// where fast shell script execution can close stdout before parsing completes.
|
||||
if runtime.GOOS == "windows" {
|
||||
scriptPath := filepath.Join(tempDir, "codex.bat")
|
||||
script := fmt.Sprintf("@echo off\r\n"+
|
||||
"echo {\"type\":\"thread.started\",\"thread_id\":\"%s\"}\r\n"+
|
||||
"echo {\"type\":\"item.completed\",\"item\":{\"type\":\"agent_message\",\"text\":\"%s\"}}\r\n"+
|
||||
"exit /b 0\r\n", threadID, message)
|
||||
if err := os.WriteFile(scriptPath, []byte(script), 0o755); err != nil {
|
||||
t.Fatalf("failed to create fake codex script: %v", err)
|
||||
}
|
||||
return scriptPath
|
||||
}
|
||||
|
||||
scriptPath := filepath.Join(tempDir, "codex.sh")
|
||||
script := fmt.Sprintf(`#!/bin/sh
|
||||
printf '%%s\n' '{"type":"thread.started","thread_id":"%s"}'
|
||||
printf '%%s\n' '{"type":"item.completed","item":{"type":"agent_message","text":"%s"}}'
|
||||
@@ -1392,6 +1406,24 @@ func TestBackendParseArgs_PromptFileFlag(t *testing.T) {
|
||||
func TestBackendParseArgs_PromptFileOverridesAgent(t *testing.T) {
|
||||
defer resetTestHooks()
|
||||
|
||||
home := t.TempDir()
|
||||
t.Setenv("HOME", home)
|
||||
t.Setenv("USERPROFILE", home)
|
||||
t.Cleanup(config.ResetModelsConfigCacheForTest)
|
||||
config.ResetModelsConfigCacheForTest()
|
||||
|
||||
configDir := filepath.Join(home, ".codeagent")
|
||||
if err := os.MkdirAll(configDir, 0o755); err != nil {
|
||||
t.Fatalf("MkdirAll: %v", err)
|
||||
}
|
||||
if err := os.WriteFile(filepath.Join(configDir, "models.json"), []byte(`{
|
||||
"agents": {
|
||||
"develop": { "backend": "codex", "model": "gpt-test" }
|
||||
}
|
||||
}`), 0o644); err != nil {
|
||||
t.Fatalf("WriteFile: %v", err)
|
||||
}
|
||||
|
||||
os.Args = []string{"codeagent-wrapper", "--prompt-file", "/tmp/custom.md", "--agent", "develop", "task"}
|
||||
cfg, err := parseArgs()
|
||||
if err != nil {
|
||||
@@ -1916,7 +1948,7 @@ func TestRun_PassesReasoningEffortToTaskSpec(t *testing.T) {
|
||||
func TestRun_NoOutputMessage_ReturnsExitCode1AndWritesStderr(t *testing.T) {
|
||||
defer resetTestHooks()
|
||||
cleanupLogsFn = func() (CleanupStats, error) { return CleanupStats{}, nil }
|
||||
t.Setenv("TMPDIR", t.TempDir())
|
||||
setTempDirEnv(t, t.TempDir())
|
||||
|
||||
selectBackendFn = func(name string) (Backend, error) {
|
||||
return testBackend{name: name, command: "echo"}, nil
|
||||
@@ -2067,8 +2099,7 @@ func TestRunBuildCodexArgs_ResumeMode_EmptySessionHandledGracefully(t *testing.T
|
||||
|
||||
func TestRunBuildCodexArgs_BypassSandboxEnvTrue(t *testing.T) {
|
||||
defer resetTestHooks()
|
||||
tempDir := t.TempDir()
|
||||
t.Setenv("TMPDIR", tempDir)
|
||||
setTempDirEnv(t, t.TempDir())
|
||||
|
||||
logger, err := NewLogger()
|
||||
if err != nil {
|
||||
@@ -2712,8 +2743,7 @@ func TestTailBufferWrite(t *testing.T) {
|
||||
|
||||
func TestRunLogFunctions(t *testing.T) {
|
||||
defer resetTestHooks()
|
||||
tempDir := t.TempDir()
|
||||
t.Setenv("TMPDIR", tempDir)
|
||||
setTempDirEnv(t, t.TempDir())
|
||||
|
||||
logger, err := NewLogger()
|
||||
if err != nil {
|
||||
@@ -2760,8 +2790,7 @@ func TestLoggerLogDropOnDone(t *testing.T) {
|
||||
|
||||
func TestLoggerLogAfterClose(t *testing.T) {
|
||||
defer resetTestHooks()
|
||||
tempDir := t.TempDir()
|
||||
t.Setenv("TMPDIR", tempDir)
|
||||
setTempDirEnv(t, t.TempDir())
|
||||
|
||||
logger, err := NewLogger()
|
||||
if err != nil {
|
||||
@@ -2924,13 +2953,10 @@ func TestRunCodexTask_StartError(t *testing.T) {
|
||||
|
||||
func TestRunCodexTask_WithEcho(t *testing.T) {
|
||||
defer resetTestHooks()
|
||||
codexCommand = "echo"
|
||||
buildCodexArgsFn = func(cfg *Config, targetArg string) []string { return []string{targetArg} }
|
||||
codexCommand = createFakeCodexScript(t, "test-session", "Test output")
|
||||
buildCodexArgsFn = func(cfg *Config, targetArg string) []string { return []string{} }
|
||||
|
||||
jsonOutput := `{"type":"thread.started","thread_id":"test-session"}
|
||||
{"type":"item.completed","item":{"type":"agent_message","text":"Test output"}}`
|
||||
|
||||
res := runCodexTask(TaskSpec{Task: jsonOutput}, false, 10)
|
||||
res := runCodexTask(TaskSpec{Task: "ignored"}, false, 10)
|
||||
if res.ExitCode != 0 || res.Message != "Test output" || res.SessionID != "test-session" {
|
||||
t.Fatalf("unexpected result: %+v", res)
|
||||
}
|
||||
@@ -3010,13 +3036,10 @@ func TestRunCodexTask_LogPathWithActiveLogger(t *testing.T) {
|
||||
}
|
||||
setLogger(logger)
|
||||
|
||||
codexCommand = "echo"
|
||||
buildCodexArgsFn = func(cfg *Config, targetArg string) []string { return []string{targetArg} }
|
||||
codexCommand = createFakeCodexScript(t, "fake-thread", "ok")
|
||||
buildCodexArgsFn = func(cfg *Config, targetArg string) []string { return []string{} }
|
||||
|
||||
jsonOutput := `{"type":"thread.started","thread_id":"fake-thread"}
|
||||
{"type":"item.completed","item":{"type":"agent_message","text":"ok"}}`
|
||||
|
||||
result := runCodexTask(TaskSpec{Task: jsonOutput}, false, 5)
|
||||
result := runCodexTask(TaskSpec{Task: "ignored"}, false, 5)
|
||||
if result.LogPath != logger.Path() {
|
||||
t.Fatalf("LogPath = %q, want %q", result.LogPath, logger.Path())
|
||||
}
|
||||
@@ -3028,13 +3051,10 @@ func TestRunCodexTask_LogPathWithActiveLogger(t *testing.T) {
|
||||
func TestRunCodexTask_LogPathWithTempLogger(t *testing.T) {
|
||||
defer resetTestHooks()
|
||||
|
||||
codexCommand = "echo"
|
||||
buildCodexArgsFn = func(cfg *Config, targetArg string) []string { return []string{targetArg} }
|
||||
codexCommand = createFakeCodexScript(t, "temp-thread", "temp")
|
||||
buildCodexArgsFn = func(cfg *Config, targetArg string) []string { return []string{} }
|
||||
|
||||
jsonOutput := `{"type":"thread.started","thread_id":"temp-thread"}
|
||||
{"type":"item.completed","item":{"type":"agent_message","text":"temp"}}`
|
||||
|
||||
result := runCodexTask(TaskSpec{Task: jsonOutput}, true, 5)
|
||||
result := runCodexTask(TaskSpec{Task: "ignored"}, true, 5)
|
||||
t.Cleanup(func() {
|
||||
if result.LogPath != "" {
|
||||
os.Remove(result.LogPath)
|
||||
@@ -3080,10 +3100,19 @@ func TestRunCodexTask_LogPathOnStartError(t *testing.T) {
|
||||
|
||||
func TestRunCodexTask_NoMessage(t *testing.T) {
|
||||
defer resetTestHooks()
|
||||
codexCommand = "echo"
|
||||
buildCodexArgsFn = func(cfg *Config, targetArg string) []string { return []string{targetArg} }
|
||||
jsonOutput := `{"type":"thread.started","thread_id":"test-session"}`
|
||||
res := runCodexTask(TaskSpec{Task: jsonOutput}, false, 10)
|
||||
|
||||
fake := newFakeCmd(fakeCmdConfig{
|
||||
StdoutPlan: []fakeStdoutEvent{
|
||||
{Data: `{"type":"thread.started","thread_id":"test-session"}` + "\n"},
|
||||
},
|
||||
WaitDelay: 5 * time.Millisecond,
|
||||
})
|
||||
restore := executor.SetNewCommandRunner(func(ctx context.Context, name string, args ...string) executor.CommandRunner { return fake })
|
||||
t.Cleanup(restore)
|
||||
|
||||
codexCommand = "fake-cmd"
|
||||
buildCodexArgsFn = func(cfg *Config, targetArg string) []string { return []string{} }
|
||||
res := runCodexTask(TaskSpec{Task: "ignored"}, false, 10)
|
||||
if res.ExitCode != 1 || res.Error == "" {
|
||||
t.Fatalf("expected error for missing agent_message, got %+v", res)
|
||||
}
|
||||
@@ -3208,20 +3237,36 @@ func TestRunCodexProcess(t *testing.T) {
|
||||
|
||||
func TestRunSilentMode(t *testing.T) {
|
||||
defer resetTestHooks()
|
||||
tmpDir := t.TempDir()
|
||||
setTempDirEnv(t, tmpDir)
|
||||
jsonOutput := `{"type":"thread.started","thread_id":"silent-session"}
|
||||
{"type":"item.completed","item":{"type":"agent_message","text":"quiet"}}`
|
||||
codexCommand = "echo"
|
||||
codexCommand = "fake-cmd"
|
||||
buildCodexArgsFn = func(cfg *Config, targetArg string) []string { return []string{targetArg} }
|
||||
_ = executor.SetNewCommandRunner(func(ctx context.Context, name string, args ...string) executor.CommandRunner {
|
||||
return newFakeCmd(fakeCmdConfig{
|
||||
StdoutPlan: []fakeStdoutEvent{{Data: jsonOutput + "\n"}},
|
||||
})
|
||||
})
|
||||
|
||||
capture := func(silent bool) string {
|
||||
oldStderr := os.Stderr
|
||||
r, w, _ := os.Pipe()
|
||||
os.Stderr = w
|
||||
res := runCodexTask(TaskSpec{Task: jsonOutput}, silent, 10)
|
||||
if res.ExitCode != 0 {
|
||||
t.Fatalf("unexpected exitCode %d", res.ExitCode)
|
||||
r, w, err := os.Pipe()
|
||||
if err != nil {
|
||||
t.Fatalf("os.Pipe() error = %v", err)
|
||||
}
|
||||
w.Close()
|
||||
os.Stderr = w
|
||||
defer func() {
|
||||
os.Stderr = oldStderr
|
||||
_ = w.Close()
|
||||
_ = r.Close()
|
||||
}()
|
||||
|
||||
res := runCodexTask(TaskSpec{Task: "ignored"}, silent, 10)
|
||||
if res.ExitCode != 0 {
|
||||
t.Fatalf("unexpected exitCode %d: %s", res.ExitCode, res.Error)
|
||||
}
|
||||
_ = w.Close()
|
||||
os.Stderr = oldStderr
|
||||
var buf bytes.Buffer
|
||||
if _, err := io.Copy(&buf, r); err != nil {
|
||||
@@ -3579,6 +3624,7 @@ do two`)
|
||||
}
|
||||
|
||||
func TestParallelFlag(t *testing.T) {
|
||||
defer resetTestHooks()
|
||||
oldArgs := os.Args
|
||||
defer func() { os.Args = oldArgs }()
|
||||
|
||||
@@ -3588,14 +3634,10 @@ id: T1
|
||||
---CONTENT---
|
||||
test`
|
||||
stdinReader = strings.NewReader(jsonInput)
|
||||
defer func() { stdinReader = os.Stdin }()
|
||||
|
||||
runCodexTaskFn = func(task TaskSpec, timeout int) TaskResult {
|
||||
return TaskResult{TaskID: task.ID, ExitCode: 0, Message: "test output"}
|
||||
}
|
||||
defer func() {
|
||||
runCodexTaskFn = func(task TaskSpec, timeout int) TaskResult { return runCodexTask(task, true, timeout) }
|
||||
}()
|
||||
|
||||
exitCode := run()
|
||||
if exitCode != 0 {
|
||||
@@ -4211,8 +4253,7 @@ func TestRun_ExplicitStdinEmpty(t *testing.T) {
|
||||
|
||||
func TestRun_ExplicitStdinReadError(t *testing.T) {
|
||||
defer resetTestHooks()
|
||||
tempDir := t.TempDir()
|
||||
t.Setenv("TMPDIR", tempDir)
|
||||
tempDir := setTempDirEnv(t, t.TempDir())
|
||||
logPath := filepath.Join(tempDir, fmt.Sprintf("codeagent-wrapper-%d.log", os.Getpid()))
|
||||
|
||||
var logOutput string
|
||||
@@ -4308,8 +4349,7 @@ func TestRun_ExplicitStdinSuccess(t *testing.T) {
|
||||
|
||||
func TestRun_PipedTaskReadError(t *testing.T) {
|
||||
defer resetTestHooks()
|
||||
tempDir := t.TempDir()
|
||||
t.Setenv("TMPDIR", tempDir)
|
||||
tempDir := setTempDirEnv(t, t.TempDir())
|
||||
logPath := filepath.Join(tempDir, fmt.Sprintf("codeagent-wrapper-%d.log", os.Getpid()))
|
||||
|
||||
var logOutput string
|
||||
@@ -4362,8 +4402,7 @@ func TestRun_PipedTaskSuccess(t *testing.T) {
|
||||
|
||||
func TestRun_LoggerLifecycle(t *testing.T) {
|
||||
defer resetTestHooks()
|
||||
tempDir := t.TempDir()
|
||||
t.Setenv("TMPDIR", tempDir)
|
||||
tempDir := setTempDirEnv(t, t.TempDir())
|
||||
logPath := filepath.Join(tempDir, fmt.Sprintf("codeagent-wrapper-%d.log", os.Getpid()))
|
||||
|
||||
stdout := captureStdoutPipe()
|
||||
@@ -4411,8 +4450,7 @@ func TestRun_LoggerRemovedOnSignal(t *testing.T) {
|
||||
// Set shorter delays for faster test
|
||||
_ = executor.SetForceKillDelay(1)
|
||||
|
||||
tempDir := t.TempDir()
|
||||
t.Setenv("TMPDIR", tempDir)
|
||||
tempDir := setTempDirEnv(t, t.TempDir())
|
||||
logPath := filepath.Join(tempDir, fmt.Sprintf("codeagent-wrapper-%d.log", os.Getpid()))
|
||||
|
||||
scriptPath := filepath.Join(tempDir, "sleepy-codex.sh")
|
||||
@@ -4466,10 +4504,8 @@ func TestRun_CleanupHookAlwaysCalled(t *testing.T) {
|
||||
called := false
|
||||
cleanupHook = func() { called = true }
|
||||
// Use a command that goes through normal flow, not --version which returns early
|
||||
restore := withBackend("echo", func(cfg *Config, targetArg string) []string {
|
||||
return []string{`{"type":"thread.started","thread_id":"x"}
|
||||
{"type":"item.completed","item":{"type":"agent_message","text":"ok"}}`}
|
||||
})
|
||||
scriptPath := createFakeCodexScript(t, "x", "ok")
|
||||
restore := withBackend(scriptPath, func(cfg *Config, targetArg string) []string { return []string{} })
|
||||
defer restore()
|
||||
os.Args = []string{"codeagent-wrapper", "task"}
|
||||
if exitCode := run(); exitCode != 0 {
|
||||
@@ -4696,16 +4732,13 @@ func TestBackendRunCoverage(t *testing.T) {
|
||||
func TestParallelLogPathInSerialMode(t *testing.T) {
|
||||
defer resetTestHooks()
|
||||
|
||||
tempDir := t.TempDir()
|
||||
t.Setenv("TMPDIR", tempDir)
|
||||
tempDir := setTempDirEnv(t, t.TempDir())
|
||||
|
||||
os.Args = []string{"codeagent-wrapper", "do-stuff"}
|
||||
stdinReader = strings.NewReader("")
|
||||
isTerminalFn = func() bool { return true }
|
||||
codexCommand = "echo"
|
||||
buildCodexArgsFn = func(cfg *Config, targetArg string) []string {
|
||||
return []string{`{"type":"thread.started","thread_id":"cli-session"}` + "\n" + `{"type":"item.completed","item":{"type":"agent_message","text":"ok"}}`}
|
||||
}
|
||||
codexCommand = createFakeCodexScript(t, "cli-session", "ok")
|
||||
buildCodexArgsFn = func(cfg *Config, targetArg string) []string { return []string{} }
|
||||
|
||||
var exitCode int
|
||||
stderr := captureStderr(t, func() {
|
||||
@@ -4729,9 +4762,8 @@ func TestRun_CLI_Success(t *testing.T) {
|
||||
stdinReader = strings.NewReader("")
|
||||
isTerminalFn = func() bool { return true }
|
||||
|
||||
restore := withBackend("echo", func(cfg *Config, targetArg string) []string {
|
||||
return []string{`{"type":"thread.started","thread_id":"cli-session"}` + "\n" + `{"type":"item.completed","item":{"type":"agent_message","text":"ok"}}`}
|
||||
})
|
||||
scriptPath := createFakeCodexScript(t, "cli-session", "ok")
|
||||
restore := withBackend(scriptPath, func(cfg *Config, targetArg string) []string { return []string{} })
|
||||
defer restore()
|
||||
|
||||
var exitCode int
|
||||
|
||||
codeagent-wrapper/internal/app/os_paths_test.go (new file, 46 lines)
@@ -0,0 +1,46 @@
|
||||
package wrapper
|
||||
|
||||
import (
|
||||
"os"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestParseArgs_Workdir_OSPaths(t *testing.T) {
|
||||
oldArgv := os.Args
|
||||
t.Cleanup(func() { os.Args = oldArgv })
|
||||
|
||||
workdirs := []struct {
|
||||
name string
|
||||
path string
|
||||
}{
|
||||
{name: "windows drive forward slashes", path: "D:/repo/path"},
|
||||
{name: "windows drive backslashes", path: `C:\repo\path`},
|
||||
{name: "windows UNC", path: `\\server\share\repo`},
|
||||
{name: "unix absolute", path: "/home/user/repo"},
|
||||
{name: "relative", path: "./relative/repo"},
|
||||
}
|
||||
|
||||
for _, wd := range workdirs {
|
||||
t.Run("new mode: "+wd.name, func(t *testing.T) {
|
||||
os.Args = []string{"codeagent-wrapper", "task", wd.path}
|
||||
cfg, err := parseArgs()
|
||||
if err != nil {
|
||||
t.Fatalf("parseArgs() error: %v", err)
|
||||
}
|
||||
if cfg.Mode != "new" || cfg.Task != "task" || cfg.WorkDir != wd.path {
|
||||
t.Fatalf("cfg mismatch: got mode=%q task=%q workdir=%q, want mode=%q task=%q workdir=%q", cfg.Mode, cfg.Task, cfg.WorkDir, "new", "task", wd.path)
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("resume mode: "+wd.name, func(t *testing.T) {
|
||||
os.Args = []string{"codeagent-wrapper", "resume", "sid-1", "task", wd.path}
|
||||
cfg, err := parseArgs()
|
||||
if err != nil {
|
||||
t.Fatalf("parseArgs() error: %v", err)
|
||||
}
|
||||
if cfg.Mode != "resume" || cfg.SessionID != "sid-1" || cfg.Task != "task" || cfg.WorkDir != wd.path {
|
||||
t.Fatalf("cfg mismatch: got mode=%q sid=%q task=%q workdir=%q, want mode=%q sid=%q task=%q workdir=%q", cfg.Mode, cfg.SessionID, cfg.Task, cfg.WorkDir, "resume", "sid-1", "task", wd.path)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
codeagent-wrapper/internal/app/stdin_mode_test.go (new file, 119 lines)
@@ -0,0 +1,119 @@
|
||||
package wrapper
|
||||
|
||||
import (
|
||||
"strings"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestRunSingleMode_UseStdin_TargetArgAndTaskText(t *testing.T) {
|
||||
defer resetTestHooks()
|
||||
|
||||
setTempDirEnv(t, t.TempDir())
|
||||
logger, err := NewLogger()
|
||||
if err != nil {
|
||||
t.Fatalf("NewLogger(): %v", err)
|
||||
}
|
||||
setLogger(logger)
|
||||
t.Cleanup(func() { _ = closeLogger() })
|
||||
|
||||
type testCase struct {
|
||||
name string
|
||||
cfgTask string
|
||||
explicit bool
|
||||
stdinData string
|
||||
isTerminal bool
|
||||
|
||||
wantUseStdin bool
|
||||
wantTarget string
|
||||
wantTaskText string
|
||||
}
|
||||
|
||||
longTask := strings.Repeat("a", 801)
|
||||
|
||||
tests := []testCase{
|
||||
{
|
||||
name: "piped input forces stdin mode",
|
||||
cfgTask: "cli-task",
|
||||
stdinData: "piped task text",
|
||||
isTerminal: false,
|
||||
wantUseStdin: true,
|
||||
wantTarget: "-",
|
||||
wantTaskText: "piped task text",
|
||||
},
|
||||
{
|
||||
name: "explicit dash forces stdin mode",
|
||||
cfgTask: "-",
|
||||
explicit: true,
|
||||
stdinData: "explicit task text",
|
||||
isTerminal: true,
|
||||
wantUseStdin: true,
|
||||
wantTarget: "-",
|
||||
wantTaskText: "explicit task text",
|
||||
},
|
||||
{
|
||||
name: "special char backslash forces stdin mode",
|
||||
cfgTask: `C:\repo\file.go`,
|
||||
isTerminal: true,
|
||||
wantUseStdin: true,
|
||||
wantTarget: "-",
|
||||
wantTaskText: `C:\repo\file.go`,
|
||||
},
|
||||
{
|
||||
name: "length>800 forces stdin mode",
|
||||
cfgTask: longTask,
|
||||
isTerminal: true,
|
||||
wantUseStdin: true,
|
||||
wantTarget: "-",
|
||||
wantTaskText: longTask,
|
||||
},
|
||||
{
|
||||
name: "simple task uses argv target",
|
||||
cfgTask: "analyze code",
|
||||
isTerminal: true,
|
||||
wantUseStdin: false,
|
||||
wantTarget: "analyze code",
|
||||
wantTaskText: "analyze code",
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
var gotTarget string
|
||||
buildCodexArgsFn = func(cfg *Config, targetArg string) []string {
|
||||
gotTarget = targetArg
|
||||
return []string{targetArg}
|
||||
}
|
||||
|
||||
var gotTask TaskSpec
|
||||
runTaskFn = func(task TaskSpec, silent bool, timeout int) TaskResult {
|
||||
gotTask = task
|
||||
return TaskResult{ExitCode: 0, Message: "ok"}
|
||||
}
|
||||
|
||||
stdinReader = strings.NewReader(tt.stdinData)
|
||||
isTerminalFn = func() bool { return tt.isTerminal }
|
||||
|
||||
cfg := &Config{
|
||||
Mode: "new",
|
||||
Task: tt.cfgTask,
|
||||
WorkDir: defaultWorkdir,
|
||||
Backend: defaultBackendName,
|
||||
ExplicitStdin: tt.explicit,
|
||||
}
|
||||
|
||||
if code := runSingleMode(cfg, "codeagent-wrapper"); code != 0 {
|
||||
t.Fatalf("runSingleMode() = %d, want 0", code)
|
||||
}
|
||||
|
||||
if gotTarget != tt.wantTarget {
|
||||
t.Fatalf("targetArg = %q, want %q", gotTarget, tt.wantTarget)
|
||||
}
|
||||
if gotTask.UseStdin != tt.wantUseStdin {
|
||||
t.Fatalf("taskSpec.UseStdin = %v, want %v", gotTask.UseStdin, tt.wantUseStdin)
|
||||
}
|
||||
if gotTask.Task != tt.wantTaskText {
|
||||
t.Fatalf("taskSpec.Task = %q, want %q", gotTask.Task, tt.wantTaskText)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
codeagent-wrapper/internal/app/tmpdir.go (new file, 134 lines)
@@ -0,0 +1,134 @@
package wrapper

import (
    "errors"
    "fmt"
    "os"
    "os/exec"
    "path/filepath"
    "runtime"
    "strings"
)

const tmpDirEnvOverrideKey = "CODEAGENT_TMPDIR"

var tmpDirExecutableCheckFn = canExecuteInDir

func ensureExecutableTempDir() {
    // Windows doesn't execute scripts via shebang, and os.TempDir semantics differ.
    if runtime.GOOS == "windows" {
        return
    }

    if override := strings.TrimSpace(os.Getenv(tmpDirEnvOverrideKey)); override != "" {
        if resolved, err := resolvePathWithTilde(override); err == nil {
            if err := os.MkdirAll(resolved, 0o700); err == nil {
                if ok, _ := tmpDirExecutableCheckFn(resolved); ok {
                    setTempEnv(resolved)
                    return
                }
            }
        }
        // Invalid override should not block execution; fall back to default behavior.
    }

    current := currentTempDirFromEnv()
    if current == "" {
        current = "/tmp"
    }

    ok, _ := tmpDirExecutableCheckFn(current)
    if ok {
        return
    }

    fallback := defaultFallbackTempDir()
    if fallback == "" {
        return
    }
    if err := os.MkdirAll(fallback, 0o700); err != nil {
        return
    }
    if ok, _ := tmpDirExecutableCheckFn(fallback); !ok {
        return
    }

    setTempEnv(fallback)
    fmt.Fprintf(os.Stderr, "INFO: temp dir is not executable; set TMPDIR=%s\n", fallback)
}

func setTempEnv(dir string) {
    _ = os.Setenv("TMPDIR", dir)
    _ = os.Setenv("TMP", dir)
    _ = os.Setenv("TEMP", dir)
}

func defaultFallbackTempDir() string {
    home, err := os.UserHomeDir()
    if err != nil || strings.TrimSpace(home) == "" {
        return ""
    }
    return filepath.Clean(filepath.Join(home, ".codeagent", "tmp"))
}

func currentTempDirFromEnv() string {
    for _, k := range []string{"TMPDIR", "TMP", "TEMP"} {
        if v := strings.TrimSpace(os.Getenv(k)); v != "" {
            return v
        }
    }
    return ""
}

func resolvePathWithTilde(p string) (string, error) {
    p = strings.TrimSpace(p)
    if p == "" {
        return "", errors.New("empty path")
    }

    if p == "~" || strings.HasPrefix(p, "~/") || strings.HasPrefix(p, "~\\") {
        home, err := os.UserHomeDir()
        if err != nil || strings.TrimSpace(home) == "" {
            if err == nil {
                err = errors.New("empty home directory")
            }
            return "", fmt.Errorf("resolve ~: %w", err)
        }
        if p == "~" {
            return home, nil
        }
        return filepath.Clean(home + p[1:]), nil
    }

    return filepath.Clean(p), nil
}

func canExecuteInDir(dir string) (bool, error) {
    dir = strings.TrimSpace(dir)
    if dir == "" {
        return false, errors.New("empty dir")
    }

    f, err := os.CreateTemp(dir, "codeagent-tmp-exec-*")
    if err != nil {
        return false, err
    }
    path := f.Name()
    defer func() { _ = os.Remove(path) }()

    if _, err := f.WriteString("#!/bin/sh\nexit 0\n"); err != nil {
        _ = f.Close()
        return false, err
    }
    if err := f.Close(); err != nil {
        return false, err
    }
    if err := os.Chmod(path, 0o700); err != nil {
        return false, err
    }

    if err := exec.Command(path).Run(); err != nil {
        return false, err
    }
    return true, nil
}
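For anyone hitting the noexec-/tmp case this file addresses, the override can be set from the shell before launching the wrapper — a minimal sketch, assuming `codeagent-wrapper` is on `PATH` and the chosen directory sits on a filesystem that permits executing files:

```bash
# Point the wrapper at an executable temp dir (useful when /tmp is mounted noexec).
# ensureExecutableTempDir creates the directory, verifies a script can run in it,
# and exports TMPDIR/TMP/TEMP; an unusable override falls back to the default handling.
export CODEAGENT_TMPDIR="$HOME/.codeagent/tmp"
# ...then invoke codeagent-wrapper as usual.
```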
96  codeagent-wrapper/internal/app/tmpdir_test.go  Normal file
@@ -0,0 +1,96 @@
package wrapper

import (
    "os"
    "path/filepath"
    "testing"
)

func TestEnsureExecutableTempDir_Override(t *testing.T) {
    restore := captureTempEnv()
    t.Cleanup(restore)

    t.Setenv("HOME", t.TempDir())
    t.Setenv("USERPROFILE", os.Getenv("HOME"))

    orig := tmpDirExecutableCheckFn
    tmpDirExecutableCheckFn = func(string) (bool, error) { return true, nil }
    t.Cleanup(func() { tmpDirExecutableCheckFn = orig })

    override := filepath.Join(t.TempDir(), "mytmp")
    t.Setenv(tmpDirEnvOverrideKey, override)

    ensureExecutableTempDir()

    if got := os.Getenv("TMPDIR"); got != override {
        t.Fatalf("TMPDIR=%q, want %q", got, override)
    }
    if got := os.Getenv("TMP"); got != override {
        t.Fatalf("TMP=%q, want %q", got, override)
    }
    if got := os.Getenv("TEMP"); got != override {
        t.Fatalf("TEMP=%q, want %q", got, override)
    }
    if st, err := os.Stat(override); err != nil || !st.IsDir() {
        t.Fatalf("override dir not created: stat=%v err=%v", st, err)
    }
}

func TestEnsureExecutableTempDir_FallbackWhenCurrentNotExecutable(t *testing.T) {
    restore := captureTempEnv()
    t.Cleanup(restore)

    home := t.TempDir()
    t.Setenv("HOME", home)
    t.Setenv("USERPROFILE", home)

    cur := filepath.Join(t.TempDir(), "cur-tmp")
    if err := os.MkdirAll(cur, 0o700); err != nil {
        t.Fatal(err)
    }
    t.Setenv("TMPDIR", cur)

    fallback := filepath.Join(home, ".codeagent", "tmp")

    orig := tmpDirExecutableCheckFn
    tmpDirExecutableCheckFn = func(dir string) (bool, error) {
        if filepath.Clean(dir) == filepath.Clean(cur) {
            return false, nil
        }
        if filepath.Clean(dir) == filepath.Clean(fallback) {
            return true, nil
        }
        return true, nil
    }
    t.Cleanup(func() { tmpDirExecutableCheckFn = orig })

    ensureExecutableTempDir()

    if got := os.Getenv("TMPDIR"); filepath.Clean(got) != filepath.Clean(fallback) {
        t.Fatalf("TMPDIR=%q, want %q", got, fallback)
    }
    if st, err := os.Stat(fallback); err != nil || !st.IsDir() {
        t.Fatalf("fallback dir not created: stat=%v err=%v", st, err)
    }
}

func captureTempEnv() func() {
    type entry struct {
        set bool
        val string
    }
    snapshot := make(map[string]entry, 3)
    for _, k := range []string{"TMPDIR", "TMP", "TEMP"} {
        v, ok := os.LookupEnv(k)
        snapshot[k] = entry{set: ok, val: v}
    }
    return func() {
        for k, e := range snapshot {
            if !e.set {
                _ = os.Unsetenv(k)
                continue
            }
            _ = os.Setenv(k, e.val)
        }
    }
}
@@ -25,7 +25,8 @@ func (ClaudeBackend) Env(baseURL, apiKey string) map[string]string {
        env["ANTHROPIC_BASE_URL"] = baseURL
    }
    if apiKey != "" {
        env["ANTHROPIC_AUTH_TOKEN"] = apiKey
        // Claude Code CLI uses ANTHROPIC_API_KEY for API-key based auth.
        env["ANTHROPIC_API_KEY"] = apiKey
    }
    return env
}
54  codeagent-wrapper/internal/backend/codex_paths_test.go  Normal file
@@ -0,0 +1,54 @@
package backend

import (
    "reflect"
    "testing"

    config "codeagent-wrapper/internal/config"
)

func TestBuildCodexArgs_Workdir_OSPaths(t *testing.T) {
    t.Setenv("CODEX_BYPASS_SANDBOX", "false")

    tests := []struct {
        name    string
        workdir string
    }{
        {name: "windows drive forward slashes", workdir: "D:/repo/path"},
        {name: "windows drive backslashes", workdir: `C:\repo\path`},
        {name: "windows UNC", workdir: `\\server\share\repo`},
        {name: "unix absolute", workdir: "/home/user/repo"},
        {name: "relative", workdir: "./relative/repo"},
    }

    for _, tt := range tests {
        t.Run(tt.name, func(t *testing.T) {
            cfg := &config.Config{Mode: "new", WorkDir: tt.workdir}
            got := BuildCodexArgs(cfg, "task")
            want := []string{"e", "--skip-git-repo-check", "-C", tt.workdir, "--json", "task"}
            if !reflect.DeepEqual(got, want) {
                t.Fatalf("BuildCodexArgs() = %v, want %v", got, want)
            }
        })
    }

    t.Run("new mode stdin target uses dash", func(t *testing.T) {
        cfg := &config.Config{Mode: "new", WorkDir: `C:\repo\path`}
        got := BuildCodexArgs(cfg, "-")
        want := []string{"e", "--skip-git-repo-check", "-C", `C:\repo\path`, "--json", "-"}
        if !reflect.DeepEqual(got, want) {
            t.Fatalf("BuildCodexArgs() = %v, want %v", got, want)
        }
    })
}

func TestBuildCodexArgs_ResumeMode_OmitsWorkdir(t *testing.T) {
    t.Setenv("CODEX_BYPASS_SANDBOX", "false")

    cfg := &config.Config{Mode: "resume", SessionID: "sid-123", WorkDir: `C:\repo\path`}
    got := BuildCodexArgs(cfg, "-")
    want := []string{"e", "--skip-git-repo-check", "--json", "resume", "sid-123", "-"}
    if !reflect.DeepEqual(got, want) {
        t.Fatalf("BuildCodexArgs() = %v, want %v", got, want)
    }
}
@@ -7,8 +7,6 @@ import (
    "strings"
    "sync"

    ilogger "codeagent-wrapper/internal/logger"

    "github.com/goccy/go-json"
)
@@ -35,80 +33,85 @@ type ModelsConfig struct {
    Backends map[string]BackendConfig `json:"backends,omitempty"`
}

var defaultModelsConfig = ModelsConfig{
    DefaultBackend: "opencode",
    DefaultModel: "opencode/grok-code",
    Agents: map[string]AgentModelConfig{
        "oracle": {Backend: "claude", Model: "claude-opus-4-5-20251101", PromptFile: "~/.claude/skills/omo/references/oracle.md", Description: "Technical advisor"},
        "librarian": {Backend: "claude", Model: "claude-sonnet-4-5-20250929", PromptFile: "~/.claude/skills/omo/references/librarian.md", Description: "Researcher"},
        "explore": {Backend: "opencode", Model: "opencode/grok-code", PromptFile: "~/.claude/skills/omo/references/explore.md", Description: "Code search"},
        "develop": {Backend: "codex", Model: "", PromptFile: "~/.claude/skills/omo/references/develop.md", Description: "Code development"},
        "frontend-ui-ux-engineer": {Backend: "gemini", Model: "", PromptFile: "~/.claude/skills/omo/references/frontend-ui-ux-engineer.md", Description: "Frontend engineer"},
        "document-writer": {Backend: "gemini", Model: "", PromptFile: "~/.claude/skills/omo/references/document-writer.md", Description: "Documentation"},
    },
}
var defaultModelsConfig = ModelsConfig{}

const modelsConfigTildePath = "~/.codeagent/models.json"

const modelsConfigExample = `{
"default_backend": "codex",
"default_model": "gpt-4.1",
"backends": {
"codex": { "api_key": "..." },
"claude": { "api_key": "..." }
},
"agents": {
"develop": {
"backend": "codex",
"model": "gpt-4.1",
"prompt_file": "~/.codeagent/prompts/develop.md",
"reasoning": "high",
"yolo": true
}
}
}`

var (
    modelsConfigOnce   sync.Once
    modelsConfigCached *ModelsConfig
    modelsConfigErr    error
)

func modelsConfig() *ModelsConfig {
func modelsConfig() (*ModelsConfig, error) {
    modelsConfigOnce.Do(func() {
        modelsConfigCached = loadModelsConfig()
        modelsConfigCached, modelsConfigErr = loadModelsConfig()
    })
    if modelsConfigCached == nil {
        return &defaultModelsConfig
    }
    return modelsConfigCached
    return modelsConfigCached, modelsConfigErr
}

func loadModelsConfig() *ModelsConfig {
func modelsConfigPath() (string, error) {
    home, err := os.UserHomeDir()
    if err != nil {
        ilogger.LogWarn(fmt.Sprintf("Failed to resolve home directory for models config: %v; using defaults", err))
        return &defaultModelsConfig
    if err != nil || strings.TrimSpace(home) == "" {
        return "", fmt.Errorf("failed to resolve user home directory: %w", err)
    }

    configDir := filepath.Clean(filepath.Join(home, ".codeagent"))
    configPath := filepath.Clean(filepath.Join(configDir, "models.json"))
    rel, err := filepath.Rel(configDir, configPath)
    if err != nil || rel == ".." || strings.HasPrefix(rel, ".."+string(os.PathSeparator)) {
        return &defaultModelsConfig
        return "", fmt.Errorf("refusing to read models config outside %s: %s", configDir, configPath)
    }
    return configPath, nil
}

func modelsConfigHint(configPath string) string {
    configPath = strings.TrimSpace(configPath)
    if configPath == "" {
        return fmt.Sprintf("Create %s with e.g.:\n%s", modelsConfigTildePath, modelsConfigExample)
    }
    return fmt.Sprintf("Create %s (resolved to %s) with e.g.:\n%s", modelsConfigTildePath, configPath, modelsConfigExample)
}

func loadModelsConfig() (*ModelsConfig, error) {
    configPath, err := modelsConfigPath()
    if err != nil {
        return nil, fmt.Errorf("%w\n\n%s", err, modelsConfigHint(""))
    }

    data, err := os.ReadFile(configPath) // #nosec G304 -- path is fixed under user home and validated to stay within configDir
    if err != nil {
        if !os.IsNotExist(err) {
            ilogger.LogWarn(fmt.Sprintf("Failed to read models config %s: %v; using defaults", configPath, err))
        if os.IsNotExist(err) {
            return nil, fmt.Errorf("models config not found: %s\n\n%s", configPath, modelsConfigHint(configPath))
        }
        return &defaultModelsConfig
        return nil, fmt.Errorf("failed to read models config %s: %w\n\n%s", configPath, err, modelsConfigHint(configPath))
    }

    var cfg ModelsConfig
    if err := json.Unmarshal(data, &cfg); err != nil {
        ilogger.LogWarn(fmt.Sprintf("Failed to parse models config %s: %v; using defaults", configPath, err))
        return &defaultModelsConfig
        return nil, fmt.Errorf("failed to parse models config %s: %w\n\n%s", configPath, err, modelsConfigHint(configPath))
    }

    cfg.DefaultBackend = strings.TrimSpace(cfg.DefaultBackend)
    if cfg.DefaultBackend == "" {
        cfg.DefaultBackend = defaultModelsConfig.DefaultBackend
    }
    cfg.DefaultModel = strings.TrimSpace(cfg.DefaultModel)
    if cfg.DefaultModel == "" {
        cfg.DefaultModel = defaultModelsConfig.DefaultModel
    }

    // Merge with defaults
    for name, agent := range defaultModelsConfig.Agents {
        if _, exists := cfg.Agents[name]; !exists {
            if cfg.Agents == nil {
                cfg.Agents = make(map[string]AgentModelConfig)
            }
            cfg.Agents[name] = agent
        }
    }

    // Normalize backend keys so lookups can be case-insensitive.
    if len(cfg.Backends) > 0 {
@@ -127,7 +130,7 @@ func loadModelsConfig() *ModelsConfig {
        }
    }

    return &cfg
    return &cfg, nil
}

func LoadDynamicAgent(name string) (AgentModelConfig, bool) {
@@ -150,7 +153,10 @@ func LoadDynamicAgent(name string) (AgentModelConfig, bool) {
}

func ResolveBackendConfig(backendName string) (baseURL, apiKey string) {
    cfg := modelsConfig()
    cfg, err := modelsConfig()
    if err != nil || cfg == nil {
        return "", ""
    }
    resolved := resolveBackendConfig(cfg, backendName)
    return strings.TrimSpace(resolved.BaseURL), strings.TrimSpace(resolved.APIKey)
}
@@ -172,12 +178,30 @@ func resolveBackendConfig(cfg *ModelsConfig, backendName string) BackendConfig {
    return BackendConfig{}
}

func resolveAgentConfig(agentName string) (backend, model, promptFile, reasoning, baseURL, apiKey string, yolo bool) {
    cfg := modelsConfig()
func resolveAgentConfig(agentName string) (backend, model, promptFile, reasoning, baseURL, apiKey string, yolo bool, err error) {
    if err := ValidateAgentName(agentName); err != nil {
        return "", "", "", "", "", "", false, err
    }

    cfg, err := modelsConfig()
    if err != nil {
        return "", "", "", "", "", "", false, err
    }
    if cfg == nil {
        return "", "", "", "", "", "", false, fmt.Errorf("models config is nil\n\n%s", modelsConfigHint(""))
    }

    if agent, ok := cfg.Agents[agentName]; ok {
        backend = strings.TrimSpace(agent.Backend)
        if backend == "" {
            backend = cfg.DefaultBackend
            backend = strings.TrimSpace(cfg.DefaultBackend)
            if backend == "" {
                configPath, pathErr := modelsConfigPath()
                if pathErr != nil {
                    return "", "", "", "", "", "", false, fmt.Errorf("agent %q has empty backend and default_backend is not set\n\n%s", agentName, modelsConfigHint(""))
                }
                return "", "", "", "", "", "", false, fmt.Errorf("agent %q has empty backend and default_backend is not set\n\n%s", agentName, modelsConfigHint(configPath))
            }
        }
        backendCfg := resolveBackendConfig(cfg, backend)
@@ -190,31 +214,46 @@ func resolveAgentConfig(agentName string) (backend, model, promptFile, reasoning
        apiKey = strings.TrimSpace(backendCfg.APIKey)
    }

        return backend, strings.TrimSpace(agent.Model), agent.PromptFile, agent.Reasoning, baseURL, apiKey, agent.Yolo
        model = strings.TrimSpace(agent.Model)
        if model == "" {
            configPath, pathErr := modelsConfigPath()
            if pathErr != nil {
                return "", "", "", "", "", "", false, fmt.Errorf("agent %q has empty model; set agents.%s.model in %s\n\n%s", agentName, agentName, modelsConfigTildePath, modelsConfigHint(""))
            }
            return "", "", "", "", "", "", false, fmt.Errorf("agent %q has empty model; set agents.%s.model in %s\n\n%s", agentName, agentName, modelsConfigTildePath, modelsConfigHint(configPath))
        }
        return backend, model, agent.PromptFile, agent.Reasoning, baseURL, apiKey, agent.Yolo, nil
    }

    if dynamic, ok := LoadDynamicAgent(agentName); ok {
        backend = cfg.DefaultBackend
        model = cfg.DefaultModel
        backend = strings.TrimSpace(cfg.DefaultBackend)
        model = strings.TrimSpace(cfg.DefaultModel)
        configPath, pathErr := modelsConfigPath()
        if backend == "" || model == "" {
            if pathErr != nil {
                return "", "", "", "", "", "", false, fmt.Errorf("dynamic agent %q requires default_backend and default_model to be set in %s\n\n%s", agentName, modelsConfigTildePath, modelsConfigHint(""))
            }
            return "", "", "", "", "", "", false, fmt.Errorf("dynamic agent %q requires default_backend and default_model to be set in %s\n\n%s", agentName, modelsConfigTildePath, modelsConfigHint(configPath))
        }
        backendCfg := resolveBackendConfig(cfg, backend)
        baseURL = strings.TrimSpace(backendCfg.BaseURL)
        apiKey = strings.TrimSpace(backendCfg.APIKey)
        return backend, model, dynamic.PromptFile, "", baseURL, apiKey, false
        return backend, model, dynamic.PromptFile, "", baseURL, apiKey, false, nil
    }

    backend = cfg.DefaultBackend
    model = cfg.DefaultModel
    backendCfg := resolveBackendConfig(cfg, backend)
    baseURL = strings.TrimSpace(backendCfg.BaseURL)
    apiKey = strings.TrimSpace(backendCfg.APIKey)
    return backend, model, "", "", baseURL, apiKey, false
    configPath, pathErr := modelsConfigPath()
    if pathErr != nil {
        return "", "", "", "", "", "", false, fmt.Errorf("agent %q not found in %s\n\n%s", agentName, modelsConfigTildePath, modelsConfigHint(""))
    }
    return "", "", "", "", "", "", false, fmt.Errorf("agent %q not found in %s\n\n%s", agentName, modelsConfigTildePath, modelsConfigHint(configPath))
}

func ResolveAgentConfig(agentName string) (backend, model, promptFile, reasoning, baseURL, apiKey string, yolo bool) {
func ResolveAgentConfig(agentName string) (backend, model, promptFile, reasoning, baseURL, apiKey string, yolo bool, err error) {
    return resolveAgentConfig(agentName)
}

func ResetModelsConfigCacheForTest() {
    modelsConfigCached = nil
    modelsConfigErr = nil
    modelsConfigOnce = sync.Once{}
}
@@ -3,78 +3,43 @@ package config
import (
    "os"
    "path/filepath"
    "strings"
    "testing"
)

func TestResolveAgentConfig_Defaults(t *testing.T) {
func TestResolveAgentConfig_NoConfig_ReturnsHelpfulError(t *testing.T) {
    home := t.TempDir()
    t.Setenv("HOME", home)
    t.Setenv("USERPROFILE", home)
    t.Cleanup(ResetModelsConfigCacheForTest)
    ResetModelsConfigCacheForTest()

    // Test that default agents resolve correctly without config file
    tests := []struct {
        agent          string
        wantBackend    string
        wantModel      string
        wantPromptFile string
    }{
        {"oracle", "claude", "claude-opus-4-5-20251101", "~/.claude/skills/omo/references/oracle.md"},
        {"librarian", "claude", "claude-sonnet-4-5-20250929", "~/.claude/skills/omo/references/librarian.md"},
        {"explore", "opencode", "opencode/grok-code", "~/.claude/skills/omo/references/explore.md"},
        {"frontend-ui-ux-engineer", "gemini", "", "~/.claude/skills/omo/references/frontend-ui-ux-engineer.md"},
        {"document-writer", "gemini", "", "~/.claude/skills/omo/references/document-writer.md"},
    _, _, _, _, _, _, _, err := ResolveAgentConfig("develop")
    if err == nil {
        t.Fatalf("expected error, got nil")
    }

    for _, tt := range tests {
        t.Run(tt.agent, func(t *testing.T) {
            backend, model, promptFile, _, _, _, _ := resolveAgentConfig(tt.agent)
            if backend != tt.wantBackend {
                t.Errorf("backend = %q, want %q", backend, tt.wantBackend)
            }
            if model != tt.wantModel {
                t.Errorf("model = %q, want %q", model, tt.wantModel)
            }
            if promptFile != tt.wantPromptFile {
                t.Errorf("promptFile = %q, want %q", promptFile, tt.wantPromptFile)
            }
        })
    msg := err.Error()
    if !strings.Contains(msg, modelsConfigTildePath) {
        t.Fatalf("error should mention %s, got: %s", modelsConfigTildePath, msg)
    }
}

func TestResolveAgentConfig_UnknownAgent(t *testing.T) {
    home := t.TempDir()
    t.Setenv("HOME", home)
    t.Setenv("USERPROFILE", home)
    t.Cleanup(ResetModelsConfigCacheForTest)
    ResetModelsConfigCacheForTest()

    backend, model, promptFile, _, _, _, _ := resolveAgentConfig("unknown-agent")
    if backend != "opencode" {
        t.Errorf("unknown agent backend = %q, want %q", backend, "opencode")
    if !strings.Contains(msg, filepath.Join(home, ".codeagent", "models.json")) {
        t.Fatalf("error should mention resolved config path, got: %s", msg)
    }
    if model != "opencode/grok-code" {
        t.Errorf("unknown agent model = %q, want %q", model, "opencode/grok-code")
    }
    if promptFile != "" {
        t.Errorf("unknown agent promptFile = %q, want empty", promptFile)
    if !strings.Contains(msg, "\"agents\"") {
        t.Fatalf("error should include example config, got: %s", msg)
    }
}

func TestLoadModelsConfig_NoFile(t *testing.T) {
    home := "/nonexistent/path/that/does/not/exist"
    home := t.TempDir()
    t.Setenv("HOME", home)
    t.Setenv("USERPROFILE", home)
    t.Cleanup(ResetModelsConfigCacheForTest)
    ResetModelsConfigCacheForTest()

    cfg := loadModelsConfig()
    if cfg.DefaultBackend != "opencode" {
        t.Errorf("DefaultBackend = %q, want %q", cfg.DefaultBackend, "opencode")
    }
    if len(cfg.Agents) != 6 {
        t.Errorf("len(Agents) = %d, want 6", len(cfg.Agents))
    _, err := loadModelsConfig()
    if err == nil {
        t.Fatalf("expected error, got nil")
    }
}
@@ -119,7 +84,10 @@ func TestLoadModelsConfig_WithFile(t *testing.T) {
    t.Cleanup(ResetModelsConfigCacheForTest)
    ResetModelsConfigCacheForTest()

    cfg := loadModelsConfig()
    cfg, err := loadModelsConfig()
    if err != nil {
        t.Fatalf("loadModelsConfig: %v", err)
    }

    if cfg.DefaultBackend != "claude" {
        t.Errorf("DefaultBackend = %q, want %q", cfg.DefaultBackend, "claude")
@@ -140,9 +108,8 @@ func TestLoadModelsConfig_WithFile(t *testing.T) {
        }
    }

    // Check that defaults are merged
    if _, ok := cfg.Agents["oracle"]; !ok {
        t.Error("default agent oracle should be merged")
    if _, ok := cfg.Agents["oracle"]; ok {
        t.Error("oracle should not be present without explicit config")
    }

    baseURL, apiKey := ResolveBackendConfig("claude")
@@ -153,7 +120,10 @@ func TestLoadModelsConfig_WithFile(t *testing.T) {
        t.Errorf("ResolveBackendConfig(apiKey) = %q, want %q", apiKey, "backend-key")
    }

    backend, model, _, _, agentBaseURL, agentAPIKey, _ := ResolveAgentConfig("custom-agent")
    backend, model, _, _, agentBaseURL, agentAPIKey, _, err := ResolveAgentConfig("custom-agent")
    if err != nil {
        t.Fatalf("ResolveAgentConfig(custom-agent): %v", err)
    }
    if backend != "codex" {
        t.Errorf("ResolveAgentConfig(backend) = %q, want %q", backend, "codex")
    }
@@ -183,12 +153,26 @@ func TestResolveAgentConfig_DynamicAgent(t *testing.T) {
        t.Fatalf("WriteFile: %v", err)
    }

    backend, model, promptFile, _, _, _, _ := resolveAgentConfig("sarsh")
    if backend != "opencode" {
        t.Errorf("backend = %q, want %q", backend, "opencode")
    configDir := filepath.Join(home, ".codeagent")
    if err := os.MkdirAll(configDir, 0o755); err != nil {
        t.Fatalf("MkdirAll: %v", err)
    }
    if model != "opencode/grok-code" {
        t.Errorf("model = %q, want %q", model, "opencode/grok-code")
    if err := os.WriteFile(filepath.Join(configDir, "models.json"), []byte(`{
"default_backend": "codex",
"default_model": "gpt-test"
}`), 0o644); err != nil {
        t.Fatalf("WriteFile: %v", err)
    }

    backend, model, promptFile, _, _, _, _, err := ResolveAgentConfig("sarsh")
    if err != nil {
        t.Fatalf("ResolveAgentConfig(sarsh): %v", err)
    }
    if backend != "codex" {
        t.Errorf("backend = %q, want %q", backend, "codex")
    }
    if model != "gpt-test" {
        t.Errorf("model = %q, want %q", model, "gpt-test")
    }
    if promptFile != "~/.codeagent/agents/sarsh.md" {
        t.Errorf("promptFile = %q, want %q", promptFile, "~/.codeagent/agents/sarsh.md")
@@ -213,9 +197,66 @@ func TestLoadModelsConfig_InvalidJSON(t *testing.T) {
    t.Cleanup(ResetModelsConfigCacheForTest)
    ResetModelsConfigCacheForTest()

    cfg := loadModelsConfig()
    // Should fall back to defaults
    if cfg.DefaultBackend != "opencode" {
        t.Errorf("invalid JSON should fallback, got DefaultBackend = %q", cfg.DefaultBackend)
    _, err := loadModelsConfig()
    if err == nil {
        t.Fatalf("expected error, got nil")
    }
}

func TestResolveAgentConfig_UnknownAgent_ReturnsError(t *testing.T) {
    home := t.TempDir()
    t.Setenv("HOME", home)
    t.Setenv("USERPROFILE", home)
    t.Cleanup(ResetModelsConfigCacheForTest)
    ResetModelsConfigCacheForTest()

    configDir := filepath.Join(home, ".codeagent")
    if err := os.MkdirAll(configDir, 0o755); err != nil {
        t.Fatalf("MkdirAll: %v", err)
    }
    if err := os.WriteFile(filepath.Join(configDir, "models.json"), []byte(`{
"default_backend": "codex",
"default_model": "gpt-test",
"agents": {
"develop": { "backend": "codex", "model": "gpt-test" }
}
}`), 0o644); err != nil {
        t.Fatalf("WriteFile: %v", err)
    }

    _, _, _, _, _, _, _, err := ResolveAgentConfig("unknown-agent")
    if err == nil {
        t.Fatalf("expected error, got nil")
    }
    if !strings.Contains(err.Error(), "unknown-agent") {
        t.Fatalf("error should mention agent name, got: %s", err.Error())
    }
}

func TestResolveAgentConfig_EmptyModel_ReturnsError(t *testing.T) {
    home := t.TempDir()
    t.Setenv("HOME", home)
    t.Setenv("USERPROFILE", home)
    t.Cleanup(ResetModelsConfigCacheForTest)
    ResetModelsConfigCacheForTest()

    configDir := filepath.Join(home, ".codeagent")
    if err := os.MkdirAll(configDir, 0o755); err != nil {
        t.Fatalf("MkdirAll: %v", err)
    }
    if err := os.WriteFile(filepath.Join(configDir, "models.json"), []byte(`{
"agents": {
"bad-agent": { "backend": "codex", "model": " " }
}
}`), 0o644); err != nil {
        t.Fatalf("WriteFile: %v", err)
    }

    _, _, _, _, _, _, _, err := ResolveAgentConfig("bad-agent")
    if err == nil {
        t.Fatalf("expected error, got nil")
    }
    if !strings.Contains(strings.ToLower(err.Error()), "empty model") {
        t.Fatalf("error should mention empty model, got: %s", err.Error())
    }
}
@@ -36,17 +36,18 @@ func TestEnvInjectionWithAgent(t *testing.T) {
        t.Fatal(err)
    }

    // Override HOME to use temp dir
    oldHome := os.Getenv("HOME")
    os.Setenv("HOME", tmpDir)
    defer os.Setenv("HOME", oldHome)
    t.Setenv("HOME", tmpDir)
    t.Setenv("USERPROFILE", tmpDir)

    // Reset config cache
    config.ResetModelsConfigCacheForTest()
    defer config.ResetModelsConfigCacheForTest()

    // Test ResolveAgentConfig
    agentBackend, model, _, _, baseURL, apiKey, _ := config.ResolveAgentConfig("test-agent")
    agentBackend, model, _, _, baseURL, apiKey, _, err := config.ResolveAgentConfig("test-agent")
    if err != nil {
        t.Fatalf("ResolveAgentConfig: %v", err)
    }
    t.Logf("ResolveAgentConfig: backend=%q, model=%q, baseURL=%q, apiKey=%q",
        agentBackend, model, baseURL, apiKey)

@@ -71,8 +72,8 @@ func TestEnvInjectionWithAgent(t *testing.T) {
    if env["ANTHROPIC_BASE_URL"] != baseURL {
        t.Errorf("expected ANTHROPIC_BASE_URL=%q, got %q", baseURL, env["ANTHROPIC_BASE_URL"])
    }
    if env["ANTHROPIC_AUTH_TOKEN"] != apiKey {
        t.Errorf("expected ANTHROPIC_AUTH_TOKEN=%q, got %q", apiKey, env["ANTHROPIC_AUTH_TOKEN"])
    if env["ANTHROPIC_API_KEY"] != apiKey {
        t.Errorf("expected ANTHROPIC_API_KEY=%q, got %q", apiKey, env["ANTHROPIC_API_KEY"])
    }
}
@@ -101,9 +102,8 @@ func TestEnvInjectionLogic(t *testing.T) {
        t.Fatal(err)
    }

    oldHome := os.Getenv("HOME")
    os.Setenv("HOME", tmpDir)
    defer os.Setenv("HOME", oldHome)
    t.Setenv("HOME", tmpDir)
    t.Setenv("USERPROFILE", tmpDir)

    config.ResetModelsConfigCacheForTest()
    defer config.ResetModelsConfigCacheForTest()
@@ -118,7 +118,10 @@ func TestEnvInjectionLogic(t *testing.T) {

    // Step 2: If agent specified, get agent config
    if agentName != "" {
        agentBackend, _, _, _, agentBaseURL, agentAPIKey, _ := config.ResolveAgentConfig(agentName)
        agentBackend, _, _, _, agentBaseURL, agentAPIKey, _, err := config.ResolveAgentConfig(agentName)
        if err != nil {
            t.Fatalf("ResolveAgentConfig(%q): %v", agentName, err)
        }
        t.Logf("Step 2 - ResolveAgentConfig(%q): backend=%q, baseURL=%q, apiKey=%q",
            agentName, agentBackend, agentBaseURL, agentAPIKey)

@@ -146,8 +149,8 @@ func TestEnvInjectionLogic(t *testing.T) {
        t.Errorf("ANTHROPIC_BASE_URL: expected %q, got %q", expectedURL, injected["ANTHROPIC_BASE_URL"])
    }

    if _, ok := injected["ANTHROPIC_AUTH_TOKEN"]; !ok {
        t.Error("ANTHROPIC_AUTH_TOKEN not set")
    if _, ok := injected["ANTHROPIC_API_KEY"]; !ok {
        t.Error("ANTHROPIC_API_KEY not set")
    }

    // Step 5: Test masking
@@ -16,7 +16,7 @@ func TestMaskSensitiveValue(t *testing.T) {
    }{
        {
            name:     "API_KEY with long value",
            key:      "ANTHROPIC_AUTH_TOKEN",
            key:      "ANTHROPIC_API_KEY",
            value:    "sk-ant-api03-xxxxxxxxxxxxxxxxxxxxxxxxxxxx",
            expected: "sk-a****xxxx",
        },
@@ -180,7 +180,7 @@ func TestClaudeBackendEnv(t *testing.T) {
            name:       "both base_url and api_key",
            baseURL:    "https://api.custom.com",
            apiKey:     "sk-test-key-12345",
            expectKeys: []string{"ANTHROPIC_BASE_URL", "ANTHROPIC_AUTH_TOKEN"},
            expectKeys: []string{"ANTHROPIC_BASE_URL", "ANTHROPIC_API_KEY"},
        },
        {
            name: "only base_url",
@@ -192,7 +192,7 @@ func TestClaudeBackendEnv(t *testing.T) {
            name:       "only api_key",
            baseURL:    "",
            apiKey:     "sk-test-key-12345",
            expectKeys: []string{"ANTHROPIC_AUTH_TOKEN"},
            expectKeys: []string{"ANTHROPIC_API_KEY"},
        },
        {
            name: "both empty",
@@ -237,8 +237,8 @@ func TestClaudeBackendEnv(t *testing.T) {
                }
            }
            if tt.apiKey != "" && strings.TrimSpace(tt.apiKey) != "" {
                if env["ANTHROPIC_AUTH_TOKEN"] != strings.TrimSpace(tt.apiKey) {
                    t.Errorf("ANTHROPIC_AUTH_TOKEN = %q, want %q", env["ANTHROPIC_AUTH_TOKEN"], strings.TrimSpace(tt.apiKey))
                if env["ANTHROPIC_API_KEY"] != strings.TrimSpace(tt.apiKey) {
                    t.Errorf("ANTHROPIC_API_KEY = %q, want %q", env["ANTHROPIC_API_KEY"], strings.TrimSpace(tt.apiKey))
                }
            }
        })
@@ -267,7 +267,7 @@ func TestEnvLoggingIntegration(t *testing.T) {
            }
        }

        if k == "ANTHROPIC_AUTH_TOKEN" {
        if k == "ANTHROPIC_API_KEY" {
            // API key should be masked
            if masked == v {
                t.Errorf("API_KEY should be masked, but got original value")
@@ -65,11 +65,8 @@ func TestEnvInjection_LogsToStderrAndMasksKey(t *testing.T) {
        t.Fatal(err)
    }

    oldHome := os.Getenv("HOME")
    if err := os.Setenv("HOME", tmpDir); err != nil {
        t.Fatal(err)
    }
    defer func() { _ = os.Setenv("HOME", oldHome) }()
    t.Setenv("HOME", tmpDir)
    t.Setenv("USERPROFILE", tmpDir)

    config.ResetModelsConfigCacheForTest()
    defer config.ResetModelsConfigCacheForTest()
@@ -120,14 +117,14 @@ func TestEnvInjection_LogsToStderrAndMasksKey(t *testing.T) {
    if cmd.env["ANTHROPIC_BASE_URL"] != baseURL {
        t.Fatalf("ANTHROPIC_BASE_URL=%q, want %q", cmd.env["ANTHROPIC_BASE_URL"], baseURL)
    }
    if cmd.env["ANTHROPIC_AUTH_TOKEN"] != apiKey {
        t.Fatalf("ANTHROPIC_AUTH_TOKEN=%q, want %q", cmd.env["ANTHROPIC_AUTH_TOKEN"], apiKey)
    if cmd.env["ANTHROPIC_API_KEY"] != apiKey {
        t.Fatalf("ANTHROPIC_API_KEY=%q, want %q", cmd.env["ANTHROPIC_API_KEY"], apiKey)
    }

    if !strings.Contains(got, "Env: ANTHROPIC_BASE_URL="+baseURL) {
        t.Fatalf("stderr missing base URL env log; stderr=%q", got)
    }
    if !strings.Contains(got, "Env: ANTHROPIC_AUTH_TOKEN=eyJh****test") {
    if !strings.Contains(got, "Env: ANTHROPIC_API_KEY=eyJh****test") {
        t.Fatalf("stderr missing masked API key log; stderr=%q", got)
    }
}
@@ -8,6 +8,7 @@ import (
    "os"
    "os/exec"
    "os/signal"
    "runtime"
    "sort"
    "strings"
    "sync"
@@ -253,6 +254,15 @@ func (p *realProcess) Signal(sig os.Signal) error {

// newCommandRunner creates a new commandRunner (test hook injection point)
var newCommandRunner = func(ctx context.Context, name string, args ...string) commandRunner {
    if runtime.GOOS == "windows" {
        lowerName := strings.ToLower(strings.TrimSpace(name))
        if strings.HasSuffix(lowerName, ".bat") || strings.HasSuffix(lowerName, ".cmd") {
            cmdArgs := make([]string, 0, 2+len(args))
            cmdArgs = append(cmdArgs, "/c", name)
            cmdArgs = append(cmdArgs, args...)
            return &realCmd{cmd: commandContext(ctx, "cmd.exe", cmdArgs...)}
        }
    }
    return &realCmd{cmd: commandContext(ctx, name, args...)}
}
@@ -1060,9 +1070,11 @@ func RunCodexTaskWithContext(parentCtx context.Context, taskSpec TaskSpec, backe
    if envBackend != nil {
        baseURL, apiKey := config.ResolveBackendConfig(cfg.Backend)
        if agentName := strings.TrimSpace(taskSpec.Agent); agentName != "" {
            agentBackend, _, _, _, agentBaseURL, agentAPIKey, _ := config.ResolveAgentConfig(agentName)
            if strings.EqualFold(strings.TrimSpace(agentBackend), strings.TrimSpace(cfg.Backend)) {
                baseURL, apiKey = agentBaseURL, agentAPIKey
            agentBackend, _, _, _, agentBaseURL, agentAPIKey, _, err := config.ResolveAgentConfig(agentName)
            if err == nil {
                if strings.EqualFold(strings.TrimSpace(agentBackend), strings.TrimSpace(cfg.Backend)) {
                    baseURL, apiKey = agentBaseURL, agentAPIKey
                }
            }
        }
        if injected := envBackend.Env(baseURL, apiKey); len(injected) > 0 {
@@ -1076,6 +1088,8 @@
        }
    }

    injectTempEnv(cmd)

    // For backends that don't support -C flag (claude, gemini), set working directory via cmd.Dir
    // Codex passes workdir via -C flag, so we skip setting Dir for it to avoid conflicts
    if cfg.Mode != "resume" && commandName != "codex" && cfg.WorkDir != "" {
@@ -1385,6 +1399,22 @@ waitLoop:
    return result
}

func injectTempEnv(cmd commandRunner) {
    if cmd == nil {
        return
    }
    env := make(map[string]string, 3)
    for _, k := range []string{"TMPDIR", "TMP", "TEMP"} {
        if v := strings.TrimSpace(os.Getenv(k)); v != "" {
            env[k] = v
        }
    }
    if len(env) == 0 {
        return
    }
    cmd.SetEnv(env)
}

func cancelReason(commandName string, ctx context.Context) string {
    if ctx == nil {
        return "Context cancelled"
@@ -93,14 +93,17 @@ func ParseParallelConfig(data []byte) (*ParallelConfig, error) {
        if strings.TrimSpace(task.Agent) == "" {
            return nil, fmt.Errorf("task block #%d has empty agent field", taskIndex)
        }
        if err := config.ValidateAgentName(task.Agent); err != nil {
            return nil, fmt.Errorf("task block #%d invalid agent name: %w", taskIndex, err)
        }
        backend, model, promptFile, reasoning, _, _, _ := config.ResolveAgentConfig(task.Agent)
        if task.Backend == "" {
            task.Backend = backend
        }
        if task.Model == "" {
        if err := config.ValidateAgentName(task.Agent); err != nil {
            return nil, fmt.Errorf("task block #%d invalid agent name: %w", taskIndex, err)
        }
        backend, model, promptFile, reasoning, _, _, _, err := config.ResolveAgentConfig(task.Agent)
        if err != nil {
            return nil, fmt.Errorf("task block #%d failed to resolve agent %q: %w", taskIndex, task.Agent, err)
        }
        if task.Backend == "" {
            task.Backend = backend
        }
        if task.Model == "" {
            task.Model = model
        }
        if task.ReasoningEffort == "" {
@@ -70,12 +70,11 @@ func TestLoggerWithSuffixNamingAndIsolation(t *testing.T) {

func TestLoggerWithSuffixReturnsErrorWhenTempDirNotWritable(t *testing.T) {
    base := t.TempDir()
    noWrite := filepath.Join(base, "ro")
    if err := os.Mkdir(noWrite, 0o500); err != nil {
        t.Fatalf("failed to create read-only temp dir: %v", err)
    notDir := filepath.Join(base, "not-a-dir")
    if err := os.WriteFile(notDir, []byte("x"), 0o644); err != nil {
        t.Fatalf("failed to create temp file: %v", err)
    }
    t.Cleanup(func() { _ = os.Chmod(noWrite, 0o700) })
    setTempDirEnv(t, noWrite)
    setTempDirEnv(t, notDir)

    logger, err := NewLoggerWithSuffix("task-err")
    if err == nil {
@@ -26,8 +26,7 @@ func compareCleanupStats(got, want CleanupStats) bool {
}

func TestLoggerCreatesFileWithPID(t *testing.T) {
    tempDir := t.TempDir()
    t.Setenv("TMPDIR", tempDir)
    tempDir := setTempDirEnv(t, t.TempDir())

    logger, err := NewLogger()
    if err != nil {
@@ -46,8 +45,7 @@ func TestLoggerCreatesFileWithPID(t *testing.T) {
}

func TestLoggerWritesLevels(t *testing.T) {
    tempDir := t.TempDir()
    t.Setenv("TMPDIR", tempDir)
    setTempDirEnv(t, t.TempDir())

    logger, err := NewLogger()
    if err != nil {
@@ -77,8 +75,7 @@ func TestLoggerWritesLevels(t *testing.T) {
}

func TestLoggerCloseStopsWorkerAndKeepsFile(t *testing.T) {
    tempDir := t.TempDir()
    t.Setenv("TMPDIR", tempDir)
    setTempDirEnv(t, t.TempDir())

    logger, err := NewLogger()
    if err != nil {
@@ -104,8 +101,7 @@ func TestLoggerCloseStopsWorkerAndKeepsFile(t *testing.T) {
}

func TestLoggerConcurrentWritesSafe(t *testing.T) {
    tempDir := t.TempDir()
    t.Setenv("TMPDIR", tempDir)
    setTempDirEnv(t, t.TempDir())

    logger, err := NewLogger()
    if err != nil {
@@ -390,12 +386,14 @@ func TestLoggerCleanupOldLogsPerformanceBound(t *testing.T) {
    fakePaths := make([]string, fileCount)
    for i := 0; i < fileCount; i++ {
        name := fmt.Sprintf("codeagent-wrapper-%d.log", 10000+i)
        fakePaths[i] = createTempLog(t, tempDir, name)
        fakePaths[i] = filepath.Join(tempDir, name)
    }

    stubGlobLogFiles(t, func(pattern string) ([]string, error) {
        return fakePaths, nil
    })
    stubFileStat(t, func(string) (os.FileInfo, error) { return fakeFileInfo{}, nil })
    stubEvalSymlinks(t, func(path string) (string, error) { return path, nil })
    stubProcessRunning(t, func(int) bool { return false })
    stubProcessStartTime(t, func(int) time.Time { return time.Time{} })

@@ -542,8 +540,7 @@ func TestLoggerIsUnsafeFileSecurityChecks(t *testing.T) {
}

func TestLoggerPathAndRemove(t *testing.T) {
    tempDir := t.TempDir()
    t.Setenv("TMPDIR", tempDir)
    setTempDirEnv(t, t.TempDir())

    logger, err := NewLoggerWithSuffix("sample")
    if err != nil {
62  install.py
@@ -518,6 +518,11 @@ def uninstall_module(name: str, cfg: Dict[str, Any], ctx: Dict[str, Any]) -> Dic

    install_dir = ctx["install_dir"]
    removed_paths = []
    status = load_installed_status(ctx)
    module_status = status.get("modules", {}).get(name, {})
    merge_dir_files = module_status.get("merge_dir_files", [])
    if not isinstance(merge_dir_files, list):
        merge_dir_files = []

    for op in cfg.get("operations", []):
        op_type = op.get("type")
@@ -531,7 +536,55 @@ def uninstall_module(name: str, cfg: Dict[str, Any], ctx: Dict[str, Any]) -> Dic
                        target.unlink()
                    removed_paths.append(str(target))
                    write_log({"level": "INFO", "message": f"Removed: {target}"}, ctx)
            # merge_dir and merge_json are harder to uninstall cleanly, skip
            elif op_type == "merge_dir":
                if not merge_dir_files:
                    write_log(
                        {
                            "level": "WARNING",
                            "message": f"No merge_dir_files recorded for {name}; skip merge_dir uninstall",
                        },
                        ctx,
                    )
                    continue

                for rel in dict.fromkeys(merge_dir_files):
                    rel_path = Path(str(rel))
                    if rel_path.is_absolute() or ".." in rel_path.parts:
                        write_log(
                            {
                                "level": "WARNING",
                                "message": f"Skip unsafe merge_dir path for {name}: {rel}",
                            },
                            ctx,
                        )
                        continue

                    target = (install_dir / rel_path).resolve()
                    if target == install_dir or install_dir not in target.parents:
                        write_log(
                            {
                                "level": "WARNING",
                                "message": f"Skip out-of-tree merge_dir path for {name}: {rel}",
                            },
                            ctx,
                        )
                        continue

                    if target.exists():
                        if target.is_dir():
                            shutil.rmtree(target)
                        else:
                            target.unlink()
                        removed_paths.append(str(target))
                        write_log({"level": "INFO", "message": f"Removed: {target}"}, ctx)

                    parent = target.parent
                    while parent != install_dir and parent.exists():
                        try:
                            parent.rmdir()
                        except OSError:
                            break
                        parent = parent.parent
        except Exception as exc:
            write_log({"level": "WARNING", "message": f"Failed to remove {op.get('target', 'unknown')}: {exc}"}, ctx)
@@ -720,7 +773,9 @@ def execute_module(name: str, cfg: Dict[str, Any], ctx: Dict[str, Any]) -> Dict[
        elif op_type == "copy_file":
            op_copy_file(op, ctx)
        elif op_type == "merge_dir":
            op_merge_dir(op, ctx)
            merged = op_merge_dir(op, ctx)
            if merged:
                result.setdefault("merge_dir_files", []).extend(merged)
        elif op_type == "merge_json":
            op_merge_json(op, ctx)
        elif op_type == "run_command":
@@ -792,7 +847,7 @@ def op_copy_dir(op: Dict[str, Any], ctx: Dict[str, Any]) -> None:
    write_log({"level": "INFO", "message": f"Copied dir {src} -> {dst}"}, ctx)


def op_merge_dir(op: Dict[str, Any], ctx: Dict[str, Any]) -> None:
def op_merge_dir(op: Dict[str, Any], ctx: Dict[str, Any]) -> List[str]:
    """Merge source dir's subdirs (commands/, agents/, etc.) into install_dir."""
    src = _source_path(op, ctx)
    install_dir = ctx["install_dir"]
@@ -813,6 +868,7 @@ def op_merge_dir(op: Dict[str, Any], ctx: Dict[str, Any]) -> None:
            merged.append(f"{subdir.name}/{f.name}")

    write_log({"level": "INFO", "message": f"Merged {src.name}: {', '.join(merged) or 'no files'}"}, ctx)
    return merged


def op_copy_file(op: Dict[str, Any], ctx: Dict[str, Any]) -> None:
@@ -158,7 +158,7 @@ EOF

## ~/.codeagent/models.json Configuration

Optional. Uses codeagent-wrapper built-in config by default. To customize:
Required when using `agent:` in parallel tasks or `--agent`. Create `~/.codeagent/models.json` to configure agent → backend/model mappings:

```json
{
@@ -45,6 +45,8 @@ To abort early, set `active: false` in the state file.
4. **Pass complete context forward.** Every agent invocation includes the Context Pack.
5. **Parallel-first.** Run independent tasks via `codeagent-wrapper --parallel`.
6. **Update state after each phase.** Keep `.claude/do.{task_id}.local.md` current.
7. **Expect long-running `codeagent-wrapper` calls.** High-reasoning modes (e.g. `xhigh`) can take a long time; stay in the orchestrator role and wait for agents to complete.
8. **Timeouts are not an escape hatch.** If a `codeagent-wrapper` invocation times out/errors, retry `codeagent-wrapper` (split/narrow the task if needed); never switch to direct implementation.

## Agents