feat install.py

Author: cexll
Date: 2025-12-05 10:28:18 +08:00
parent 386937cfb3
commit c3dd5b567f
7 changed files with 1457 additions and 0 deletions

config.json (new file, 89 lines added)

@@ -0,0 +1,89 @@
{
"version": "1.0",
"install_dir": "~/.claude",
"log_file": "install.log",
"modules": {
"dev": {
"enabled": true,
"description": "Core dev workflow with Codex integration",
"operations": [
{
"type": "merge_dir",
"source": "dev-workflow",
"description": "Merge commands/ and agents/ into install dir"
},
{
"type": "copy_file",
"source": "memorys/CLAUDE.md",
"target": "CLAUDE.md",
"description": "Copy core role and guidelines"
},
{
"type": "copy_file",
"source": "skills/codex/SKILL.md",
"target": "skills/codex/SKILL.md",
"description": "Install codex skill"
},
{
"type": "run_command",
"command": "bash install.sh",
"description": "Install codex-wrapper binary",
"env": {
"INSTALL_DIR": "${install_dir}"
}
}
]
},
"bmad": {
"enabled": false,
"description": "BMAD agile workflow with multi-agent orchestration",
"operations": [
{
"type": "merge_dir",
"source": "bmad-agile-workflow",
"description": "Merge BMAD commands and agents"
},
{
"type": "copy_file",
"source": "docs/BMAD-WORKFLOW.md",
"target": "docs/BMAD-WORKFLOW.md",
"description": "Copy BMAD workflow documentation"
}
]
},
"requirements": {
"enabled": false,
"description": "Requirements-driven development workflow",
"operations": [
{
"type": "merge_dir",
"source": "requirements-driven-workflow",
"description": "Merge requirements workflow commands and agents"
},
{
"type": "copy_file",
"source": "docs/REQUIREMENTS-WORKFLOW.md",
"target": "docs/REQUIREMENTS-WORKFLOW.md",
"description": "Copy requirements workflow documentation"
}
]
},
"essentials": {
"enabled": true,
"description": "Core development commands and utilities",
"operations": [
{
"type": "merge_dir",
"source": "development-essentials",
"description": "Merge essential development commands"
},
{
"type": "copy_file",
"source": "docs/DEVELOPMENT-COMMANDS.md",
"target": "docs/DEVELOPMENT-COMMANDS.md",
"description": "Copy development commands documentation"
}
]
}
}
}
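
For context, a minimal usage sketch of how this config is consumed, assuming install.py sits next to config.json in the repo root (the Python call below is equivalent to running: python3 install.py --config config.json --module dev):

import install

# Install only the "dev" module; main() returns 0 on success, 1 when loading fails
# or a module fails and is rolled back. The run_command step receives INSTALL_DIR
# with ${install_dir} expanded to the resolved install directory.
rc = install.main(["--config", "config.json", "--module", "dev"])

# Add --force to overwrite files that already exist under the install directory.
rc = install.main(["--config", "config.json", "--module", "dev", "--force"])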

config.schema.json (new file, 109 lines added)

@@ -0,0 +1,109 @@
{
"$schema": "https://json-schema.org/draft/2020-12/schema",
"$id": "https://github.com/cexll/myclaude/config.schema.json",
"title": "Modular Installation Config",
"type": "object",
"additionalProperties": false,
"required": ["version", "install_dir", "log_file", "modules"],
"properties": {
"version": {
"type": "string",
"pattern": "^[0-9]+\\.[0-9]+(\\.[0-9]+)?$"
},
"install_dir": {
"type": "string",
"minLength": 1,
"description": "Target installation directory, supports ~/ expansion"
},
"log_file": {
"type": "string",
"minLength": 1
},
"modules": {
"type": "object",
"description": "可自定义的模块定义,每个模块名称可任意指定",
"patternProperties": {
"^[a-zA-Z0-9_-]+$": { "$ref": "#/$defs/module" }
},
"additionalProperties": false,
"minProperties": 1
}
},
"$defs": {
"module": {
"type": "object",
"additionalProperties": false,
"required": ["enabled", "description", "operations"],
"properties": {
"enabled": { "type": "boolean", "default": false },
"description": { "type": "string", "minLength": 3 },
"operations": {
"type": "array",
"minItems": 1,
"items": { "$ref": "#/$defs/operation" }
}
}
},
"operation": {
"oneOf": [
{ "$ref": "#/$defs/op_copy_dir" },
{ "$ref": "#/$defs/op_copy_file" },
{ "$ref": "#/$defs/op_merge_dir" },
{ "$ref": "#/$defs/op_run_command" }
]
},
"common_operation_fields": {
"type": "object",
"properties": {
"description": { "type": "string" }
},
"additionalProperties": true
},
"op_copy_dir": {
"type": "object",
"additionalProperties": false,
"required": ["type", "source", "target"],
"properties": {
"type": { "const": "copy_dir" },
"source": { "type": "string", "minLength": 1 },
"target": { "type": "string", "minLength": 1 },
"description": { "type": "string" }
}
},
"op_copy_file": {
"type": "object",
"additionalProperties": false,
"required": ["type", "source", "target"],
"properties": {
"type": { "const": "copy_file" },
"source": { "type": "string", "minLength": 1 },
"target": { "type": "string", "minLength": 1 },
"description": { "type": "string" }
}
},
"op_merge_dir": {
"type": "object",
"additionalProperties": false,
"required": ["type", "source"],
"properties": {
"type": { "const": "merge_dir" },
"source": { "type": "string", "minLength": 1 },
"description": { "type": "string" }
}
},
"op_run_command": {
"type": "object",
"additionalProperties": false,
"required": ["type", "command"],
"properties": {
"type": { "const": "run_command" },
"command": { "type": "string", "minLength": 1 },
"description": { "type": "string" },
"env": {
"type": "object",
"additionalProperties": { "type": "string" }
}
}
}
}
}
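
Because module names are matched by the ^[a-zA-Z0-9_-]+$ pattern rather than a fixed list, any custom module that follows the module shape validates. A minimal sketch, assuming the jsonschema package already used by install.py (the "my-extra" module name and sample.txt path are hypothetical):

import json
import jsonschema

with open("config.schema.json", encoding="utf-8") as fh:
    schema = json.load(fh)

config = {
    "version": "1.0",
    "install_dir": "~/.claude",
    "log_file": "install.log",
    "modules": {
        "my-extra": {  # hypothetical module name; anything matching the pattern is allowed
            "enabled": True,
            "description": "example custom module",
            "operations": [
                {"type": "copy_file", "source": "sample.txt", "target": "sample.txt"}
            ],
        }
    },
}
jsonschema.validate(config, schema)  # raises jsonschema.ValidationError on a bad shape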

install.py (new file, 425 lines added)

@@ -0,0 +1,425 @@
#!/usr/bin/env python3
"""JSON-driven modular installer.
Keep it simple: validate config, expand paths, run four operation types,
and record what happened. Designed to be small, readable, and predictable.
"""
from __future__ import annotations
import argparse
import json
import os
import shutil
import subprocess
import sys
from datetime import datetime
from pathlib import Path
from typing import Any, Dict, Iterable, List, Optional
import jsonschema
DEFAULT_INSTALL_DIR = "~/.claude"
def _ensure_list(ctx: Dict[str, Any], key: str) -> List[Any]:
ctx.setdefault(key, [])
return ctx[key]
def parse_args(argv: Optional[Iterable[str]] = None) -> argparse.Namespace:
"""Parse CLI arguments.
The default install dir must remain "~/.claude" to match docs/tests.
"""
parser = argparse.ArgumentParser(
description="JSON-driven modular installation system"
)
parser.add_argument(
"--install-dir",
default=DEFAULT_INSTALL_DIR,
help="Installation directory (defaults to ~/.claude)",
)
parser.add_argument(
"--module",
help="Comma-separated modules to install, or 'all' for all enabled",
)
parser.add_argument(
"--config",
default="config.json",
help="Path to configuration file",
)
parser.add_argument(
"--list-modules",
action="store_true",
help="List available modules and exit",
)
parser.add_argument(
"--force",
action="store_true",
help="Force overwrite existing files",
)
return parser.parse_args(argv)
def _load_json(path: Path) -> Any:
try:
with path.open("r", encoding="utf-8") as fh:
return json.load(fh)
except FileNotFoundError as exc:
raise FileNotFoundError(f"File not found: {path}") from exc
except json.JSONDecodeError as exc:
raise ValueError(f"Invalid JSON in {path}: {exc}") from exc
def load_config(path: str) -> Dict[str, Any]:
"""Load config and validate against JSON Schema.
Schema is searched in the config directory first, then alongside this file.
"""
config_path = Path(path).expanduser().resolve()
config = _load_json(config_path)
schema_candidates = [
config_path.parent / "config.schema.json",
Path(__file__).resolve().with_name("config.schema.json"),
]
schema_path = next((p for p in schema_candidates if p.exists()), None)
if schema_path is None:
raise FileNotFoundError("config.schema.json not found")
schema = _load_json(schema_path)
try:
jsonschema.validate(config, schema)
except jsonschema.ValidationError as exc:
raise ValueError(f"Config validation failed: {exc.message}") from exc
return config
def resolve_paths(config: Dict[str, Any], args: argparse.Namespace) -> Dict[str, Any]:
"""Resolve all filesystem paths to absolute Path objects."""
config_dir = Path(args.config).expanduser().resolve().parent
if args.install_dir and args.install_dir != DEFAULT_INSTALL_DIR:
install_dir_raw = args.install_dir
elif config.get("install_dir"):
install_dir_raw = config.get("install_dir")
else:
install_dir_raw = DEFAULT_INSTALL_DIR
install_dir = Path(install_dir_raw).expanduser().resolve()
log_file_raw = config.get("log_file", "install.log")
log_file = Path(log_file_raw).expanduser()
if not log_file.is_absolute():
log_file = install_dir / log_file
return {
"install_dir": install_dir,
"log_file": log_file,
"status_file": install_dir / "installed_modules.json",
"config_dir": config_dir,
"force": bool(getattr(args, "force", False)),
"applied_paths": [],
"status_backup": None,
}
def list_modules(config: Dict[str, Any]) -> None:
print("Available Modules:")
print(f"{'Name':<15} {'Enabled':<8} Description")
print("-" * 60)
for name, cfg in config.get("modules", {}).items():
enabled = "" if cfg.get("enabled", False) else ""
desc = cfg.get("description", "")
print(f"{name:<15} {enabled:<8} {desc}")
def select_modules(config: Dict[str, Any], module_arg: Optional[str]) -> Dict[str, Any]:
modules = config.get("modules", {})
if not module_arg:
return {k: v for k, v in modules.items() if v.get("enabled", False)}
if module_arg.strip().lower() == "all":
return {k: v for k, v in modules.items() if v.get("enabled", False)}
selected: Dict[str, Any] = {}
for name in (part.strip() for part in module_arg.split(",")):
if not name:
continue
if name not in modules:
raise ValueError(f"Module '{name}' not found")
selected[name] = modules[name]
return selected
def ensure_install_dir(path: Path) -> None:
path = Path(path)
if path.exists() and not path.is_dir():
raise NotADirectoryError(f"Install path exists and is not a directory: {path}")
path.mkdir(parents=True, exist_ok=True)
if not os.access(path, os.W_OK):
raise PermissionError(f"No write permission for install dir: {path}")
def execute_module(name: str, cfg: Dict[str, Any], ctx: Dict[str, Any]) -> Dict[str, Any]:
result: Dict[str, Any] = {
"module": name,
"status": "success",
"operations": [],
"installed_at": datetime.now().isoformat(),
}
for op in cfg.get("operations", []):
op_type = op.get("type")
try:
if op_type == "copy_dir":
op_copy_dir(op, ctx)
elif op_type == "copy_file":
op_copy_file(op, ctx)
elif op_type == "merge_dir":
op_merge_dir(op, ctx)
elif op_type == "run_command":
op_run_command(op, ctx)
else:
raise ValueError(f"Unknown operation type: {op_type}")
result["operations"].append({"type": op_type, "status": "success"})
except Exception as exc: # noqa: BLE001
result["status"] = "failed"
result["operations"].append(
{"type": op_type, "status": "failed", "error": str(exc)}
)
write_log(
{
"level": "ERROR",
"message": f"Module {name} failed on {op_type}: {exc}",
},
ctx,
)
raise
return result
def _source_path(op: Dict[str, Any], ctx: Dict[str, Any]) -> Path:
return (ctx["config_dir"] / op["source"]).expanduser().resolve()
def _target_path(op: Dict[str, Any], ctx: Dict[str, Any]) -> Path:
return (ctx["install_dir"] / op["target"]).expanduser().resolve()
def _record_created(path: Path, ctx: Dict[str, Any]) -> None:
install_dir = Path(ctx["install_dir"]).resolve()
resolved = Path(path).resolve()
if resolved == install_dir or install_dir not in resolved.parents:
return
applied = _ensure_list(ctx, "applied_paths")
if resolved not in applied:
applied.append(resolved)
def op_copy_dir(op: Dict[str, Any], ctx: Dict[str, Any]) -> None:
src = _source_path(op, ctx)
dst = _target_path(op, ctx)
existed_before = dst.exists()
if existed_before and not ctx.get("force", False):
write_log({"level": "INFO", "message": f"Skip existing dir: {dst}"}, ctx)
return
dst.parent.mkdir(parents=True, exist_ok=True)
shutil.copytree(src, dst, dirs_exist_ok=True)
if not existed_before:
_record_created(dst, ctx)
write_log({"level": "INFO", "message": f"Copied dir {src} -> {dst}"}, ctx)
def op_merge_dir(op: Dict[str, Any], ctx: Dict[str, Any]) -> None:
"""Merge source dir's subdirs (commands/, agents/, etc.) into install_dir."""
src = _source_path(op, ctx)
install_dir = ctx["install_dir"]
force = ctx.get("force", False)
merged = []
for subdir in src.iterdir():
if not subdir.is_dir():
continue
target_subdir = install_dir / subdir.name
target_subdir.mkdir(parents=True, exist_ok=True)
for f in subdir.iterdir():
if f.is_file():
dst = target_subdir / f.name
if dst.exists() and not force:
continue
shutil.copy2(f, dst)
merged.append(f"{subdir.name}/{f.name}")
write_log({"level": "INFO", "message": f"Merged {src.name}: {', '.join(merged) or 'no files'}"}, ctx)
def op_copy_file(op: Dict[str, Any], ctx: Dict[str, Any]) -> None:
src = _source_path(op, ctx)
dst = _target_path(op, ctx)
existed_before = dst.exists()
if existed_before and not ctx.get("force", False):
write_log({"level": "INFO", "message": f"Skip existing file: {dst}"}, ctx)
return
dst.parent.mkdir(parents=True, exist_ok=True)
shutil.copy2(src, dst)
if not existed_before:
_record_created(dst, ctx)
write_log({"level": "INFO", "message": f"Copied file {src} -> {dst}"}, ctx)
def op_run_command(op: Dict[str, Any], ctx: Dict[str, Any]) -> None:
env = os.environ.copy()
for key, value in op.get("env", {}).items():
env[key] = value.replace("${install_dir}", str(ctx["install_dir"]))
command = op.get("command", "")
result = subprocess.run(
command,
shell=True,
cwd=ctx["config_dir"],
env=env,
capture_output=True,
text=True,
)
write_log(
{
"level": "INFO",
"message": f"Command: {command}",
"stdout": result.stdout,
"stderr": result.stderr,
"returncode": result.returncode,
},
ctx,
)
if result.returncode != 0:
raise RuntimeError(f"Command failed with code {result.returncode}: {command}")
def write_log(entry: Dict[str, Any], ctx: Dict[str, Any]) -> None:
log_path = Path(ctx["log_file"])
log_path.parent.mkdir(parents=True, exist_ok=True)
ts = datetime.now().isoformat()
level = entry.get("level", "INFO")
message = entry.get("message", "")
with log_path.open("a", encoding="utf-8") as fh:
fh.write(f"[{ts}] {level}: {message}\n")
for key in ("stdout", "stderr", "returncode"):
if key in entry and entry[key] not in (None, ""):
fh.write(f" {key}: {entry[key]}\n")
def write_status(results: List[Dict[str, Any]], ctx: Dict[str, Any]) -> None:
status = {
"installed_at": datetime.now().isoformat(),
"modules": {item["module"]: item for item in results},
}
status_path = Path(ctx["status_file"])
status_path.parent.mkdir(parents=True, exist_ok=True)
with status_path.open("w", encoding="utf-8") as fh:
json.dump(status, fh, indent=2, ensure_ascii=False)
def prepare_status_backup(ctx: Dict[str, Any]) -> None:
status_path = Path(ctx["status_file"])
if status_path.exists():
backup = status_path.with_suffix(".json.bak")
backup.parent.mkdir(parents=True, exist_ok=True)
shutil.copy2(status_path, backup)
ctx["status_backup"] = backup
def rollback(ctx: Dict[str, Any]) -> None:
write_log({"level": "WARNING", "message": "Rolling back installation"}, ctx)
install_dir = Path(ctx["install_dir"]).resolve()
for path in reversed(ctx.get("applied_paths", [])):
resolved = Path(path).resolve()
try:
if resolved == install_dir or install_dir not in resolved.parents:
continue
if resolved.is_dir():
shutil.rmtree(resolved, ignore_errors=True)
else:
resolved.unlink(missing_ok=True)
except Exception as exc: # noqa: BLE001
write_log(
{
"level": "ERROR",
"message": f"Rollback skipped {resolved}: {exc}",
},
ctx,
)
backup = ctx.get("status_backup")
if backup and Path(backup).exists():
shutil.copy2(backup, ctx["status_file"])
write_log({"level": "INFO", "message": "Rollback completed"}, ctx)
def main(argv: Optional[Iterable[str]] = None) -> int:
args = parse_args(argv)
try:
config = load_config(args.config)
except Exception as exc: # noqa: BLE001
print(f"Error loading config: {exc}", file=sys.stderr)
return 1
ctx = resolve_paths(config, args)
if getattr(args, "list_modules", False):
list_modules(config)
return 0
modules = select_modules(config, args.module)
try:
ensure_install_dir(ctx["install_dir"])
except Exception as exc: # noqa: BLE001
print(f"Failed to prepare install dir: {exc}", file=sys.stderr)
return 1
prepare_status_backup(ctx)
results: List[Dict[str, Any]] = []
for name, cfg in modules.items():
try:
results.append(execute_module(name, cfg, ctx))
except Exception: # noqa: BLE001
if not args.force:
rollback(ctx)
return 1
rollback(ctx)
results.append(
{
"module": name,
"status": "failed",
"operations": [],
"installed_at": datetime.now().isoformat(),
}
)
break
write_status(results, ctx)
return 0
if __name__ == "__main__": # pragma: no cover
sys.exit(main())
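
A quick sketch of the path-resolution priority implemented by resolve_paths(), assuming a config.json in the current directory (/tmp/claude-demo is an arbitrary example path): an explicit --install-dir wins over config["install_dir"], which wins over the built-in ~/.claude default, and a relative log_file is placed inside the resolved install directory.

import install

config = install.load_config("config.json")

# The CLI flag differs from the default, so it takes priority over config["install_dir"].
args = install.parse_args(["--config", "config.json", "--install-dir", "/tmp/claude-demo"])
ctx = install.resolve_paths(config, args)
print(ctx["install_dir"])  # /tmp/claude-demo
print(ctx["log_file"])     # /tmp/claude-demo/install.log (relative log_file joins install_dir)

# Without --install-dir, config["install_dir"] is expanded and used instead.
default_ctx = install.resolve_paths(config, install.parse_args(["--config", "config.json"]))
print(default_ctx["install_dir"])  # ~/.claude expanded to the user's home directory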

tests/test_config.cover (new file, 76 lines added)

@@ -0,0 +1,76 @@
1: import copy
1: import json
1: import unittest
1: from pathlib import Path
1: import jsonschema
1: CONFIG_PATH = Path(__file__).resolve().parents[1] / "config.json"
1: SCHEMA_PATH = Path(__file__).resolve().parents[1] / "config.schema.json"
1: ROOT = CONFIG_PATH.parent
1: def load_config():
with CONFIG_PATH.open(encoding="utf-8") as f:
return json.load(f)
1: def load_schema():
with SCHEMA_PATH.open(encoding="utf-8") as f:
return json.load(f)
2: class ConfigSchemaTest(unittest.TestCase):
1: def test_config_matches_schema(self):
config = load_config()
schema = load_schema()
jsonschema.validate(config, schema)
1: def test_required_modules_present(self):
modules = load_config()["modules"]
self.assertEqual(set(modules.keys()), {"dev", "bmad", "requirements", "essentials"})
1: def test_enabled_defaults_and_flags(self):
modules = load_config()["modules"]
self.assertTrue(modules["dev"]["enabled"])
self.assertTrue(modules["essentials"]["enabled"])
self.assertFalse(modules["bmad"]["enabled"])
self.assertFalse(modules["requirements"]["enabled"])
self.assertFalse(modules["advanced"]["enabled"])
1: def test_operations_have_expected_shape(self):
config = load_config()
for name, module in config["modules"].items():
self.assertTrue(module["operations"], f"{name} should declare at least one operation")
for op in module["operations"]:
self.assertIn("type", op)
if op["type"] in {"copy_dir", "copy_file"}:
self.assertTrue(op.get("source"), f"{name} operation missing source")
self.assertTrue(op.get("target"), f"{name} operation missing target")
elif op["type"] == "run_command":
self.assertTrue(op.get("command"), f"{name} run_command missing command")
if "env" in op:
self.assertIsInstance(op["env"], dict)
else:
self.fail(f"Unsupported operation type: {op['type']}")
1: def test_operation_sources_exist_on_disk(self):
config = load_config()
for module in config["modules"].values():
for op in module["operations"]:
if op["type"] in {"copy_dir", "copy_file"}:
path = (ROOT / op["source"]).expanduser()
self.assertTrue(path.exists(), f"Source path not found: {path}")
1: def test_schema_rejects_invalid_operation_type(self):
config = load_config()
invalid = copy.deepcopy(config)
invalid["modules"]["dev"]["operations"][0]["type"] = "unknown_op"
schema = load_schema()
with self.assertRaises(jsonschema.exceptions.ValidationError):
jsonschema.validate(invalid, schema)
1: if __name__ == "__main__":
1: unittest.main()

tests/test_config.py (new file, 76 lines added)

@@ -0,0 +1,76 @@
import copy
import json
import unittest
from pathlib import Path
import jsonschema
CONFIG_PATH = Path(__file__).resolve().parents[1] / "config.json"
SCHEMA_PATH = Path(__file__).resolve().parents[1] / "config.schema.json"
ROOT = CONFIG_PATH.parent
def load_config():
with CONFIG_PATH.open(encoding="utf-8") as f:
return json.load(f)
def load_schema():
with SCHEMA_PATH.open(encoding="utf-8") as f:
return json.load(f)
class ConfigSchemaTest(unittest.TestCase):
def test_config_matches_schema(self):
config = load_config()
schema = load_schema()
jsonschema.validate(config, schema)
def test_required_modules_present(self):
modules = load_config()["modules"]
self.assertEqual(set(modules.keys()), {"dev", "bmad", "requirements", "essentials"})
def test_enabled_defaults_and_flags(self):
modules = load_config()["modules"]
self.assertTrue(modules["dev"]["enabled"])
self.assertTrue(modules["essentials"]["enabled"])
self.assertFalse(modules["bmad"]["enabled"])
self.assertFalse(modules["requirements"]["enabled"])
self.assertFalse(modules["advanced"]["enabled"])
def test_operations_have_expected_shape(self):
config = load_config()
for name, module in config["modules"].items():
self.assertTrue(module["operations"], f"{name} should declare at least one operation")
for op in module["operations"]:
self.assertIn("type", op)
if op["type"] in {"copy_dir", "copy_file"}:
self.assertTrue(op.get("source"), f"{name} operation missing source")
self.assertTrue(op.get("target"), f"{name} operation missing target")
elif op["type"] == "run_command":
self.assertTrue(op.get("command"), f"{name} run_command missing command")
if "env" in op:
self.assertIsInstance(op["env"], dict)
else:
self.fail(f"Unsupported operation type: {op['type']}")
def test_operation_sources_exist_on_disk(self):
config = load_config()
for module in config["modules"].values():
for op in module["operations"]:
if op["type"] in {"copy_dir", "copy_file"}:
path = (ROOT / op["source"]).expanduser()
self.assertTrue(path.exists(), f"Source path not found: {path}")
def test_schema_rejects_invalid_operation_type(self):
config = load_config()
invalid = copy.deepcopy(config)
invalid["modules"]["dev"]["operations"][0]["type"] = "unknown_op"
schema = load_schema()
with self.assertRaises(jsonschema.exceptions.ValidationError):
jsonschema.validate(invalid, schema)
if __name__ == "__main__":
unittest.main()

tests/test_install.py (new file, 458 lines added)

@@ -0,0 +1,458 @@
import json
import os
import shutil
import sys
from pathlib import Path
import pytest
import install
ROOT = Path(__file__).resolve().parents[1]
SCHEMA_PATH = ROOT / "config.schema.json"
def write_config(tmp_path: Path, config: dict) -> Path:
cfg_path = tmp_path / "config.json"
cfg_path.write_text(json.dumps(config), encoding="utf-8")
shutil.copy(SCHEMA_PATH, tmp_path / "config.schema.json")
return cfg_path
@pytest.fixture()
def valid_config(tmp_path):
sample_file = tmp_path / "sample.txt"
sample_file.write_text("hello", encoding="utf-8")
sample_dir = tmp_path / "sample_dir"
sample_dir.mkdir()
(sample_dir / "f.txt").write_text("dir", encoding="utf-8")
config = {
"version": "1.0",
"install_dir": "~/.fromconfig",
"log_file": "install.log",
"modules": {
"dev": {
"enabled": True,
"description": "dev module",
"operations": [
{"type": "copy_dir", "source": "sample_dir", "target": "devcopy"}
],
},
"bmad": {
"enabled": False,
"description": "bmad",
"operations": [
{"type": "copy_file", "source": "sample.txt", "target": "bmad.txt"}
],
},
"requirements": {
"enabled": False,
"description": "reqs",
"operations": [
{"type": "copy_file", "source": "sample.txt", "target": "req.txt"}
],
},
"essentials": {
"enabled": True,
"description": "ess",
"operations": [
{"type": "copy_file", "source": "sample.txt", "target": "ess.txt"}
],
},
"advanced": {
"enabled": False,
"description": "adv",
"operations": [
{"type": "copy_file", "source": "sample.txt", "target": "adv.txt"}
],
},
},
}
cfg_path = write_config(tmp_path, config)
return cfg_path, config
def make_ctx(tmp_path: Path) -> dict:
install_dir = tmp_path / "install"
return {
"install_dir": install_dir,
"log_file": install_dir / "install.log",
"status_file": install_dir / "installed_modules.json",
"config_dir": tmp_path,
"force": False,
}
def test_parse_args_defaults():
args = install.parse_args([])
assert args.install_dir == install.DEFAULT_INSTALL_DIR
assert args.config == "config.json"
assert args.module is None
assert args.list_modules is False
assert args.force is False
def test_parse_args_custom():
args = install.parse_args(
[
"--install-dir",
"/tmp/custom",
"--module",
"dev,bmad",
"--config",
"/tmp/cfg.json",
"--list-modules",
"--force",
]
)
assert args.install_dir == "/tmp/custom"
assert args.module == "dev,bmad"
assert args.config == "/tmp/cfg.json"
assert args.list_modules is True
assert args.force is True
def test_load_config_success(valid_config):
cfg_path, config_data = valid_config
loaded = install.load_config(str(cfg_path))
assert loaded["modules"]["dev"]["description"] == config_data["modules"]["dev"]["description"]
def test_load_config_invalid_json(tmp_path):
bad = tmp_path / "bad.json"
bad.write_text("{broken", encoding="utf-8")
shutil.copy(SCHEMA_PATH, tmp_path / "config.schema.json")
with pytest.raises(ValueError):
install.load_config(str(bad))
def test_load_config_schema_error(tmp_path):
cfg = tmp_path / "cfg.json"
cfg.write_text(json.dumps({"version": "1.0"}), encoding="utf-8")
shutil.copy(SCHEMA_PATH, tmp_path / "config.schema.json")
with pytest.raises(ValueError):
install.load_config(str(cfg))
def test_resolve_paths_respects_priority(tmp_path):
config = {
"install_dir": str(tmp_path / "from_config"),
"log_file": "logs/install.log",
"modules": {},
"version": "1.0",
}
cfg_path = write_config(tmp_path, config)
args = install.parse_args(["--config", str(cfg_path)])
ctx = install.resolve_paths(config, args)
assert ctx["install_dir"] == (tmp_path / "from_config").resolve()
assert ctx["log_file"] == (tmp_path / "from_config" / "logs" / "install.log").resolve()
assert ctx["config_dir"] == tmp_path.resolve()
cli_args = install.parse_args(
["--install-dir", str(tmp_path / "cli_dir"), "--config", str(cfg_path)]
)
ctx_cli = install.resolve_paths(config, cli_args)
assert ctx_cli["install_dir"] == (tmp_path / "cli_dir").resolve()
def test_list_modules_output(valid_config, capsys):
_, config_data = valid_config
install.list_modules(config_data)
captured = capsys.readouterr().out
assert "dev" in captured
assert "essentials" in captured
assert "" in captured
def test_select_modules_behaviour(valid_config):
_, config_data = valid_config
selected_default = install.select_modules(config_data, None)
assert set(selected_default.keys()) == {"dev", "essentials"}
selected_specific = install.select_modules(config_data, "bmad")
assert set(selected_specific.keys()) == {"bmad"}
with pytest.raises(ValueError):
install.select_modules(config_data, "missing")
def test_ensure_install_dir(tmp_path, monkeypatch):
target = tmp_path / "install_here"
install.ensure_install_dir(target)
assert target.is_dir()
file_path = tmp_path / "conflict"
file_path.write_text("x", encoding="utf-8")
with pytest.raises(NotADirectoryError):
install.ensure_install_dir(file_path)
blocked = tmp_path / "blocked"
real_access = os.access
def fake_access(path, mode):
if Path(path) == blocked:
return False
return real_access(path, mode)
monkeypatch.setattr(os, "access", fake_access)
with pytest.raises(PermissionError):
install.ensure_install_dir(blocked)
def test_op_copy_dir_respects_force(tmp_path):
ctx = make_ctx(tmp_path)
install.ensure_install_dir(ctx["install_dir"])
src = tmp_path / "src"
src.mkdir()
(src / "a.txt").write_text("one", encoding="utf-8")
op = {"type": "copy_dir", "source": "src", "target": "dest"}
install.op_copy_dir(op, ctx)
target_file = ctx["install_dir"] / "dest" / "a.txt"
assert target_file.read_text(encoding="utf-8") == "one"
(src / "a.txt").write_text("two", encoding="utf-8")
install.op_copy_dir(op, ctx)
assert target_file.read_text(encoding="utf-8") == "one"
ctx["force"] = True
install.op_copy_dir(op, ctx)
assert target_file.read_text(encoding="utf-8") == "two"
def test_op_copy_file_behaviour(tmp_path):
ctx = make_ctx(tmp_path)
install.ensure_install_dir(ctx["install_dir"])
src = tmp_path / "file.txt"
src.write_text("first", encoding="utf-8")
op = {"type": "copy_file", "source": "file.txt", "target": "out/file.txt"}
install.op_copy_file(op, ctx)
dst = ctx["install_dir"] / "out" / "file.txt"
assert dst.read_text(encoding="utf-8") == "first"
src.write_text("second", encoding="utf-8")
install.op_copy_file(op, ctx)
assert dst.read_text(encoding="utf-8") == "first"
ctx["force"] = True
install.op_copy_file(op, ctx)
assert dst.read_text(encoding="utf-8") == "second"
def test_op_run_command_success(tmp_path):
ctx = make_ctx(tmp_path)
install.ensure_install_dir(ctx["install_dir"])
install.op_run_command({"type": "run_command", "command": "echo hello"}, ctx)
log_content = ctx["log_file"].read_text(encoding="utf-8")
assert "hello" in log_content
def test_op_run_command_failure(tmp_path):
ctx = make_ctx(tmp_path)
install.ensure_install_dir(ctx["install_dir"])
with pytest.raises(RuntimeError):
install.op_run_command(
{"type": "run_command", "command": f"{sys.executable} -c 'import sys; sys.exit(2)'"},
ctx,
)
log_content = ctx["log_file"].read_text(encoding="utf-8")
assert "returncode: 2" in log_content
def test_execute_module_success(tmp_path):
ctx = make_ctx(tmp_path)
install.ensure_install_dir(ctx["install_dir"])
src = tmp_path / "src.txt"
src.write_text("data", encoding="utf-8")
cfg = {"operations": [{"type": "copy_file", "source": "src.txt", "target": "out.txt"}]}
result = install.execute_module("demo", cfg, ctx)
assert result["status"] == "success"
assert (ctx["install_dir"] / "out.txt").read_text(encoding="utf-8") == "data"
def test_execute_module_failure_logs_and_stops(tmp_path):
ctx = make_ctx(tmp_path)
install.ensure_install_dir(ctx["install_dir"])
cfg = {"operations": [{"type": "unknown", "source": "", "target": ""}]}
with pytest.raises(ValueError):
install.execute_module("demo", cfg, ctx)
log_content = ctx["log_file"].read_text(encoding="utf-8")
assert "failed on unknown" in log_content
def test_write_log_and_status(tmp_path):
ctx = make_ctx(tmp_path)
install.ensure_install_dir(ctx["install_dir"])
install.write_log({"level": "INFO", "message": "hello"}, ctx)
content = ctx["log_file"].read_text(encoding="utf-8")
assert "hello" in content
results = [
{"module": "dev", "status": "success", "operations": [], "installed_at": "ts"}
]
install.write_status(results, ctx)
status_data = json.loads(ctx["status_file"].read_text(encoding="utf-8"))
assert status_data["modules"]["dev"]["status"] == "success"
def test_main_success(valid_config, tmp_path):
cfg_path, _ = valid_config
install_dir = tmp_path / "install_final"
rc = install.main(
[
"--config",
str(cfg_path),
"--install-dir",
str(install_dir),
"--module",
"dev",
]
)
assert rc == 0
assert (install_dir / "devcopy" / "f.txt").exists()
assert (install_dir / "installed_modules.json").exists()
def test_main_failure_without_force(tmp_path):
cfg = {
"version": "1.0",
"install_dir": "~/.claude",
"log_file": "install.log",
"modules": {
"dev": {
"enabled": True,
"description": "dev",
"operations": [
{
"type": "run_command",
"command": f"{sys.executable} -c 'import sys; sys.exit(3)'",
}
],
},
"bmad": {
"enabled": False,
"description": "bmad",
"operations": [
{"type": "copy_file", "source": "s.txt", "target": "t.txt"}
],
},
"requirements": {
"enabled": False,
"description": "reqs",
"operations": [
{"type": "copy_file", "source": "s.txt", "target": "r.txt"}
],
},
"essentials": {
"enabled": False,
"description": "ess",
"operations": [
{"type": "copy_file", "source": "s.txt", "target": "e.txt"}
],
},
"advanced": {
"enabled": False,
"description": "adv",
"operations": [
{"type": "copy_file", "source": "s.txt", "target": "a.txt"}
],
},
},
}
cfg_path = write_config(tmp_path, cfg)
install_dir = tmp_path / "fail_install"
rc = install.main(
[
"--config",
str(cfg_path),
"--install-dir",
str(install_dir),
"--module",
"dev",
]
)
assert rc == 1
assert not (install_dir / "installed_modules.json").exists()
def test_main_force_records_failure(tmp_path):
cfg = {
"version": "1.0",
"install_dir": "~/.claude",
"log_file": "install.log",
"modules": {
"dev": {
"enabled": True,
"description": "dev",
"operations": [
{
"type": "run_command",
"command": f"{sys.executable} -c 'import sys; sys.exit(4)'",
}
],
},
"bmad": {
"enabled": False,
"description": "bmad",
"operations": [
{"type": "copy_file", "source": "s.txt", "target": "t.txt"}
],
},
"requirements": {
"enabled": False,
"description": "reqs",
"operations": [
{"type": "copy_file", "source": "s.txt", "target": "r.txt"}
],
},
"essentials": {
"enabled": False,
"description": "ess",
"operations": [
{"type": "copy_file", "source": "s.txt", "target": "e.txt"}
],
},
"advanced": {
"enabled": False,
"description": "adv",
"operations": [
{"type": "copy_file", "source": "s.txt", "target": "a.txt"}
],
},
},
}
cfg_path = write_config(tmp_path, cfg)
install_dir = tmp_path / "force_install"
rc = install.main(
[
"--config",
str(cfg_path),
"--install-dir",
str(install_dir),
"--module",
"dev",
"--force",
]
)
assert rc == 0
status = json.loads((install_dir / "installed_modules.json").read_text(encoding="utf-8"))
assert status["modules"]["dev"]["status"] == "failed"

tests/test_modules.py (new file, 224 lines added)

@@ -0,0 +1,224 @@
import json
import shutil
import sys
from pathlib import Path
import pytest
import install
ROOT = Path(__file__).resolve().parents[1]
SCHEMA_PATH = ROOT / "config.schema.json"
def _write_schema(target_dir: Path) -> None:
shutil.copy(SCHEMA_PATH, target_dir / "config.schema.json")
def _base_config(install_dir: Path, modules: dict) -> dict:
return {
"version": "1.0",
"install_dir": str(install_dir),
"log_file": "install.log",
"modules": modules,
}
def _prepare_env(tmp_path: Path, modules: dict) -> tuple[Path, Path, Path]:
"""Create a temp config directory with schema and config.json."""
config_dir = tmp_path / "config"
install_dir = tmp_path / "install"
config_dir.mkdir()
_write_schema(config_dir)
cfg_path = config_dir / "config.json"
cfg_path.write_text(
json.dumps(_base_config(install_dir, modules)), encoding="utf-8"
)
return cfg_path, install_dir, config_dir
def _sample_sources(config_dir: Path) -> dict:
sample_dir = config_dir / "sample_dir"
sample_dir.mkdir()
(sample_dir / "nested.txt").write_text("dir-content", encoding="utf-8")
sample_file = config_dir / "sample.txt"
sample_file.write_text("file-content", encoding="utf-8")
return {"dir": sample_dir, "file": sample_file}
def _read_status(install_dir: Path) -> dict:
return json.loads((install_dir / "installed_modules.json").read_text("utf-8"))
def test_single_module_full_flow(tmp_path):
cfg_path, install_dir, config_dir = _prepare_env(
tmp_path,
{
"solo": {
"enabled": True,
"description": "single module",
"operations": [
{"type": "copy_dir", "source": "sample_dir", "target": "payload"},
{
"type": "copy_file",
"source": "sample.txt",
"target": "payload/sample.txt",
},
{
"type": "run_command",
"command": f"{sys.executable} -c \"from pathlib import Path; Path('run.txt').write_text('ok', encoding='utf-8')\"",
},
],
}
},
)
_sample_sources(config_dir)
rc = install.main(["--config", str(cfg_path), "--module", "solo"])
assert rc == 0
assert (install_dir / "payload" / "nested.txt").read_text(encoding="utf-8") == "dir-content"
assert (install_dir / "payload" / "sample.txt").read_text(encoding="utf-8") == "file-content"
assert (install_dir / "run.txt").read_text(encoding="utf-8") == "ok"
status = _read_status(install_dir)
assert status["modules"]["solo"]["status"] == "success"
assert len(status["modules"]["solo"]["operations"]) == 3
def test_multi_module_install_and_status(tmp_path):
modules = {
"alpha": {
"enabled": True,
"description": "alpha",
"operations": [
{
"type": "copy_file",
"source": "sample.txt",
"target": "alpha.txt",
}
],
},
"beta": {
"enabled": True,
"description": "beta",
"operations": [
{
"type": "copy_dir",
"source": "sample_dir",
"target": "beta_dir",
}
],
},
}
cfg_path, install_dir, config_dir = _prepare_env(tmp_path, modules)
_sample_sources(config_dir)
rc = install.main(["--config", str(cfg_path)])
assert rc == 0
assert (install_dir / "alpha.txt").read_text(encoding="utf-8") == "file-content"
assert (install_dir / "beta_dir" / "nested.txt").exists()
status = _read_status(install_dir)
assert set(status["modules"].keys()) == {"alpha", "beta"}
assert all(mod["status"] == "success" for mod in status["modules"].values())
def test_force_overwrites_existing_files(tmp_path):
modules = {
"forcey": {
"enabled": True,
"description": "force copy",
"operations": [
{
"type": "copy_file",
"source": "sample.txt",
"target": "target.txt",
}
],
}
}
cfg_path, install_dir, config_dir = _prepare_env(tmp_path, modules)
sources = _sample_sources(config_dir)
install.main(["--config", str(cfg_path), "--module", "forcey"])
assert (install_dir / "target.txt").read_text(encoding="utf-8") == "file-content"
sources["file"].write_text("new-content", encoding="utf-8")
rc = install.main(["--config", str(cfg_path), "--module", "forcey", "--force"])
assert rc == 0
assert (install_dir / "target.txt").read_text(encoding="utf-8") == "new-content"
status = _read_status(install_dir)
assert status["modules"]["forcey"]["status"] == "success"
def test_failure_triggers_rollback_and_restores_status(tmp_path):
# First successful run to create a known-good status file.
ok_modules = {
"stable": {
"enabled": True,
"description": "stable",
"operations": [
{
"type": "copy_file",
"source": "sample.txt",
"target": "stable.txt",
}
],
}
}
cfg_path, install_dir, config_dir = _prepare_env(tmp_path, ok_modules)
_sample_sources(config_dir)
assert install.main(["--config", str(cfg_path)]) == 0
pre_status = _read_status(install_dir)
assert "stable" in pre_status["modules"]
# Rewrite config to introduce a failing module.
failing_modules = {
**ok_modules,
"broken": {
"enabled": True,
"description": "will fail",
"operations": [
{
"type": "copy_file",
"source": "sample.txt",
"target": "broken.txt",
},
{
"type": "run_command",
"command": f"{sys.executable} -c 'import sys; sys.exit(5)'",
},
],
},
}
cfg_path.write_text(
json.dumps(_base_config(install_dir, failing_modules)), encoding="utf-8"
)
rc = install.main(["--config", str(cfg_path)])
assert rc == 1
# The failed module's file should have been removed by rollback.
assert not (install_dir / "broken.txt").exists()
# Previously installed files remain.
assert (install_dir / "stable.txt").exists()
restored_status = _read_status(install_dir)
assert restored_status == pre_status
log_content = (install_dir / "install.log").read_text(encoding="utf-8")
assert "Rolling back" in log_content