mirror of https://github.com/cexll/myclaude.git (synced 2026-02-12 03:27:47 +08:00)

Compare commits (9 commits)
| SHA1 |
|---|
| 7240e08900 |
| e122d8ff25 |
| 6985a30a6a |
| dd4c12b8e2 |
| a88315d92d |
| d1f13b3379 |
| 5d362852ab |
| 238c7b9a13 |
| 0986fa82ee |
@@ -42,6 +42,13 @@
       "version": "5.6.1",
       "source": "./development-essentials",
       "category": "productivity"
+    },
+    {
+      "name": "sparv",
+      "description": "Minimal SPARV workflow (Specify→Plan→Act→Review→Vault) with 10-point spec gate, unified journal, 2-action saves, 3-failure protocol, and EHRB risk detection",
+      "version": "1.1.0",
+      "source": "./skills/sparv",
+      "category": "development"
     }
   ]
 }
@@ -7,7 +7,7 @@
 
 [](https://www.gnu.org/licenses/agpl-3.0)
 [](https://claude.ai/code)
 [](https://github.com/cexll/myclaude)
 
 > AI-powered development automation with multi-backend execution (Codex/Claude/Gemini)
 
@@ -2,7 +2,7 @@
 
 [](https://www.gnu.org/licenses/agpl-3.0)
 [](https://claude.ai/code)
 [](https://github.com/cexll/myclaude)
 
 > AI 驱动的开发自动化 - 多后端执行架构 (Codex/Claude/Gemini)
 
@@ -188,6 +188,15 @@ func TestOpencodeBackend_BuildArgs(t *testing.T) {
 			t.Errorf("got %v, want %v", got, want)
 		}
 	})
+
+	t.Run("stdin mode omits dash", func(t *testing.T) {
+		cfg := &Config{Mode: "new"}
+		got := backend.BuildArgs(cfg, "-")
+		want := []string{"run", "--format", "json"}
+		if !reflect.DeepEqual(got, want) {
+			t.Errorf("got %v, want %v", got, want)
+		}
+	})
 }
 
 func TestOpencodeBackend_Interface(t *testing.T) {
@@ -204,7 +204,10 @@ func (OpencodeBackend) BuildArgs(cfg *Config, targetArg string) []string {
 	if cfg.Mode == "resume" && cfg.SessionID != "" {
 		args = append(args, "-s", cfg.SessionID)
 	}
-	args = append(args, "--format", "json", targetArg)
+	args = append(args, "--format", "json")
+	if targetArg != "-" {
+		args = append(args, targetArg)
+	}
 	return args
 }
 
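With this change the opencode backend no longer forwards a literal "-" as the target. A hedged sketch of the resulting command lines (the `opencode` binary name and the target path are assumptions for illustration, not shown in this diff):

```bash
# Assumed effect of the BuildArgs change (illustrative only)
opencode run --format json ./task-prompt.md   # explicit target: appended after the flags
opencode run --format json                    # targetArg == "-": dash omitted, prompt expected on stdin
```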
config.json (14 lines changed)

@@ -93,7 +93,7 @@
       ]
     },
     "essentials": {
-      "enabled": true,
+      "enabled": false,
       "description": "Core development commands and utilities",
       "operations": [
         {
@@ -156,6 +156,18 @@
           "description": "Install develop agent prompt"
         }
       ]
+    },
+    "sparv": {
+      "enabled": false,
+      "description": "SPARV workflow (Specify→Plan→Act→Review→Vault) with 10-point gate",
+      "operations": [
+        {
+          "type": "copy_dir",
+          "source": "skills/sparv",
+          "target": "skills/sparv",
+          "description": "Install sparv skill with all scripts and hooks"
+        }
+      ]
     }
   }
 }
install.py (398 lines changed)
@@ -46,7 +46,7 @@ def parse_args(argv: Optional[Iterable[str]] = None) -> argparse.Namespace:
     )
     parser.add_argument(
         "--module",
-        help="Comma-separated modules to install, or 'all' for all enabled",
+        help="Comma-separated modules to install/uninstall, or 'all'",
     )
     parser.add_argument(
         "--config",
@@ -58,6 +58,16 @@ def parse_args(argv: Optional[Iterable[str]] = None) -> argparse.Namespace:
         action="store_true",
         help="List available modules and exit",
     )
+    parser.add_argument(
+        "--status",
+        action="store_true",
+        help="Show installation status of all modules",
+    )
+    parser.add_argument(
+        "--uninstall",
+        action="store_true",
+        help="Uninstall specified modules",
+    )
     parser.add_argument(
         "--force",
        action="store_true",
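Together with the interactive path added further down, the new flags could be exercised roughly like this (a sketch; the module name is illustrative and comes from config.json):

```bash
# Hedged usage sketch for the new install.py options
python3 install.py --list-modules              # numbered module list (existing flag, now with a '#' column)
python3 install.py --status                    # installation status of every module
python3 install.py --uninstall --module sparv  # --uninstall requires --module
python3 install.py                             # no --module: drops into the interactive manager
```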
@@ -166,22 +176,93 @@ def resolve_paths(config: Dict[str, Any], args: argparse.Namespace) -> Dict[str,
 
 def list_modules(config: Dict[str, Any]) -> None:
     print("Available Modules:")
-    print(f"{'Name':<15} {'Default':<8} Description")
-    print("-" * 60)
-    for name, cfg in config.get("modules", {}).items():
+    print(f"{'#':<3} {'Name':<15} {'Default':<8} Description")
+    print("-" * 65)
+    for idx, (name, cfg) in enumerate(config.get("modules", {}).items(), 1):
         default = "✓" if cfg.get("enabled", False) else "✗"
         desc = cfg.get("description", "")
-        print(f"{name:<15} {default:<8} {desc}")
+        print(f"{idx:<3} {name:<15} {default:<8} {desc}")
     print("\n✓ = installed by default when no --module specified")
 
 
+def load_installed_status(ctx: Dict[str, Any]) -> Dict[str, Any]:
+    """Load installed modules status from status file."""
+    status_path = Path(ctx["status_file"])
+    if status_path.exists():
+        try:
+            return _load_json(status_path)
+        except (ValueError, FileNotFoundError):
+            return {"modules": {}}
+    return {"modules": {}}
+
+
+def check_module_installed(name: str, cfg: Dict[str, Any], ctx: Dict[str, Any]) -> bool:
+    """Check if a module is installed by verifying its files exist."""
+    install_dir = ctx["install_dir"]
+
+    for op in cfg.get("operations", []):
+        op_type = op.get("type")
+        if op_type in ("copy_dir", "copy_file"):
+            target = (install_dir / op["target"]).expanduser().resolve()
+            if target.exists():
+                return True
+    return False
+
+
+def get_installed_modules(config: Dict[str, Any], ctx: Dict[str, Any]) -> Dict[str, bool]:
+    """Get installation status of all modules by checking files."""
+    result = {}
+    modules = config.get("modules", {})
+
+    # First check status file
+    status = load_installed_status(ctx)
+    status_modules = status.get("modules", {})
+
+    for name, cfg in modules.items():
+        # Check both status file and filesystem
+        in_status = name in status_modules
+        files_exist = check_module_installed(name, cfg, ctx)
+        result[name] = in_status or files_exist
+
+    return result
+
+
+def list_modules_with_status(config: Dict[str, Any], ctx: Dict[str, Any]) -> None:
+    """List modules with installation status."""
+    installed_status = get_installed_modules(config, ctx)
+    status_data = load_installed_status(ctx)
+    status_modules = status_data.get("modules", {})
+
+    print("\n" + "=" * 70)
+    print("Module Status")
+    print("=" * 70)
+    print(f"{'#':<3} {'Name':<15} {'Status':<15} {'Installed At':<20} Description")
+    print("-" * 70)
+
+    for idx, (name, cfg) in enumerate(config.get("modules", {}).items(), 1):
+        desc = cfg.get("description", "")[:25]
+        if installed_status.get(name, False):
+            status = "✅ Installed"
+            installed_at = status_modules.get(name, {}).get("installed_at", "")[:16]
+        else:
+            status = "⬚ Not installed"
+            installed_at = ""
+        print(f"{idx:<3} {name:<15} {status:<15} {installed_at:<20} {desc}")
+
+    total = len(config.get("modules", {}))
+    installed_count = sum(1 for v in installed_status.values() if v)
+    print(f"\nTotal: {installed_count}/{total} modules installed")
+    print(f"Install dir: {ctx['install_dir']}")
+
+
 def select_modules(config: Dict[str, Any], module_arg: Optional[str]) -> Dict[str, Any]:
     modules = config.get("modules", {})
     if not module_arg:
-        return {k: v for k, v in modules.items() if v.get("enabled", False)}
+        # No --module specified: show interactive selection
+        return interactive_select_modules(config)
 
     if module_arg.strip().lower() == "all":
-        return {k: v for k, v in modules.items() if v.get("enabled", False)}
+        return dict(modules.items())
 
     selected: Dict[str, Any] = {}
     for name in (part.strip() for part in module_arg.split(",")):
@@ -193,6 +274,256 @@ def select_modules(config: Dict[str, Any], module_arg: Optional[str]) -> Dict[st
     return selected
 
 
+def interactive_select_modules(config: Dict[str, Any]) -> Dict[str, Any]:
+    """Interactive module selection when no --module is specified."""
+    modules = config.get("modules", {})
+    module_names = list(modules.keys())
+
+    print("\n" + "=" * 65)
+    print("Welcome to Claude Plugin Installer")
+    print("=" * 65)
+    print("\nNo modules specified. Please select modules to install:\n")
+
+    list_modules(config)
+
+    print("\nEnter module numbers or names (comma-separated), or:")
+    print(" 'all' - Install all modules")
+    print(" 'q' - Quit without installing")
+    print()
+
+    while True:
+        try:
+            user_input = input("Select modules: ").strip()
+        except (EOFError, KeyboardInterrupt):
+            print("\nInstallation cancelled.")
+            sys.exit(0)
+
+        if not user_input:
+            print("No input. Please enter module numbers, names, 'all', or 'q'.")
+            continue
+
+        if user_input.lower() == "q":
+            print("Installation cancelled.")
+            sys.exit(0)
+
+        if user_input.lower() == "all":
+            print(f"\nSelected all {len(modules)} modules.")
+            return dict(modules.items())
+
+        # Parse selection
+        selected: Dict[str, Any] = {}
+        parts = [p.strip() for p in user_input.replace(" ", ",").split(",") if p.strip()]
+
+        try:
+            for part in parts:
+                # Try as number first
+                if part.isdigit():
+                    idx = int(part) - 1
+                    if 0 <= idx < len(module_names):
+                        name = module_names[idx]
+                        selected[name] = modules[name]
+                    else:
+                        print(f"Invalid number: {part}. Valid range: 1-{len(module_names)}")
+                        selected = {}
+                        break
+                # Try as name
+                elif part in modules:
+                    selected[part] = modules[part]
+                else:
+                    print(f"Module not found: '{part}'")
+                    selected = {}
+                    break
+
+            if selected:
+                names = ", ".join(selected.keys())
+                print(f"\nSelected {len(selected)} module(s): {names}")
+                return selected
+
+        except ValueError:
+            print("Invalid input. Please try again.")
+            continue
+
+
+def uninstall_module(name: str, cfg: Dict[str, Any], ctx: Dict[str, Any]) -> Dict[str, Any]:
+    """Uninstall a module by removing its files."""
+    result: Dict[str, Any] = {
+        "module": name,
+        "status": "success",
+        "uninstalled_at": datetime.now().isoformat(),
+    }
+
+    install_dir = ctx["install_dir"]
+    removed_paths = []
+
+    for op in cfg.get("operations", []):
+        op_type = op.get("type")
+        try:
+            if op_type in ("copy_dir", "copy_file"):
+                target = (install_dir / op["target"]).expanduser().resolve()
+                if target.exists():
+                    if target.is_dir():
+                        shutil.rmtree(target)
+                    else:
+                        target.unlink()
+                    removed_paths.append(str(target))
+                    write_log({"level": "INFO", "message": f"Removed: {target}"}, ctx)
+            # merge_dir and merge_json are harder to uninstall cleanly, skip
+        except Exception as exc:
+            write_log({"level": "WARNING", "message": f"Failed to remove {op.get('target', 'unknown')}: {exc}"}, ctx)
+
+    result["removed_paths"] = removed_paths
+    return result
+
+
+def update_status_after_uninstall(uninstalled_modules: List[str], ctx: Dict[str, Any]) -> None:
+    """Remove uninstalled modules from status file."""
+    status = load_installed_status(ctx)
+    modules = status.get("modules", {})
+
+    for name in uninstalled_modules:
+        if name in modules:
+            del modules[name]
+
+    status["modules"] = modules
+    status["updated_at"] = datetime.now().isoformat()
+
+    status_path = Path(ctx["status_file"])
+    with status_path.open("w", encoding="utf-8") as fh:
+        json.dump(status, fh, indent=2, ensure_ascii=False)
+
+
+def interactive_manage(config: Dict[str, Any], ctx: Dict[str, Any]) -> int:
+    """Interactive module management menu."""
+    while True:
+        installed_status = get_installed_modules(config, ctx)
+        modules = config.get("modules", {})
+        module_names = list(modules.keys())
+
+        print("\n" + "=" * 70)
+        print("Claude Plugin Manager")
+        print("=" * 70)
+        print(f"{'#':<3} {'Name':<15} {'Status':<15} Description")
+        print("-" * 70)
+
+        for idx, (name, cfg) in enumerate(modules.items(), 1):
+            desc = cfg.get("description", "")[:30]
+            if installed_status.get(name, False):
+                status = "✅ Installed"
+            else:
+                status = "⬚ Not installed"
+            print(f"{idx:<3} {name:<15} {status:<15} {desc}")
+
+        total = len(modules)
+        installed_count = sum(1 for v in installed_status.values() if v)
+        print(f"\nInstalled: {installed_count}/{total} | Dir: {ctx['install_dir']}")
+
+        print("\nCommands:")
+        print(" i <num/name> - Install module(s)")
+        print(" u <num/name> - Uninstall module(s)")
+        print(" q - Quit")
+        print()
+
+        try:
+            user_input = input("Enter command: ").strip()
+        except (EOFError, KeyboardInterrupt):
+            print("\nExiting.")
+            return 0
+
+        if not user_input:
+            continue
+
+        if user_input.lower() == "q":
+            print("Goodbye!")
+            return 0
+
+        parts = user_input.split(maxsplit=1)
+        cmd = parts[0].lower()
+        args = parts[1] if len(parts) > 1 else ""
+
+        if cmd == "i":
+            # Install
+            selected = _parse_module_selection(args, modules, module_names)
+            if selected:
+                # Filter out already installed
+                to_install = {k: v for k, v in selected.items() if not installed_status.get(k, False)}
+                if not to_install:
+                    print("All selected modules are already installed.")
+                    continue
+                print(f"\nInstalling: {', '.join(to_install.keys())}")
+                results = []
+                for name, cfg in to_install.items():
+                    try:
+                        results.append(execute_module(name, cfg, ctx))
+                        print(f" ✓ {name} installed")
+                    except Exception as exc:
+                        print(f" ✗ {name} failed: {exc}")
+                # Update status
+                current_status = load_installed_status(ctx)
+                for r in results:
+                    if r.get("status") == "success":
+                        current_status.setdefault("modules", {})[r["module"]] = r
+                current_status["updated_at"] = datetime.now().isoformat()
+                with Path(ctx["status_file"]).open("w", encoding="utf-8") as fh:
+                    json.dump(current_status, fh, indent=2, ensure_ascii=False)
+
+        elif cmd == "u":
+            # Uninstall
+            selected = _parse_module_selection(args, modules, module_names)
+            if selected:
+                # Filter to only installed ones
+                to_uninstall = {k: v for k, v in selected.items() if installed_status.get(k, False)}
+                if not to_uninstall:
+                    print("None of the selected modules are installed.")
+                    continue
+                print(f"\nUninstalling: {', '.join(to_uninstall.keys())}")
+                confirm = input("Confirm? (y/N): ").strip().lower()
+                if confirm != "y":
+                    print("Cancelled.")
+                    continue
+                for name, cfg in to_uninstall.items():
+                    try:
+                        uninstall_module(name, cfg, ctx)
+                        print(f" ✓ {name} uninstalled")
+                    except Exception as exc:
+                        print(f" ✗ {name} failed: {exc}")
+                update_status_after_uninstall(list(to_uninstall.keys()), ctx)
+
+        else:
+            print(f"Unknown command: {cmd}. Use 'i', 'u', or 'q'.")
+
+
+def _parse_module_selection(
+    args: str, modules: Dict[str, Any], module_names: List[str]
+) -> Dict[str, Any]:
+    """Parse module selection from user input."""
+    if not args:
+        print("Please specify module number(s) or name(s).")
+        return {}
+
+    if args.lower() == "all":
+        return dict(modules.items())
+
+    selected: Dict[str, Any] = {}
+    parts = [p.strip() for p in args.replace(",", " ").split() if p.strip()]
+
+    for part in parts:
+        if part.isdigit():
+            idx = int(part) - 1
+            if 0 <= idx < len(module_names):
+                name = module_names[idx]
+                selected[name] = modules[name]
+            else:
+                print(f"Invalid number: {part}")
+                return {}
+        elif part in modules:
+            selected[part] = modules[part]
+        else:
+            print(f"Module not found: '{part}'")
+            return {}
+
+    return selected
+
+
 def ensure_install_dir(path: Path) -> None:
     path = Path(path)
     if path.exists() and not path.is_dir():
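For orientation, a hedged transcript of the interactive manager these functions implement (module names, numbering, and the abridged output are illustrative):

```bash
python3 install.py
# ======================================================================
# Claude Plugin Manager
# ======================================================================
# #   Name            Status           Description
# 9   sparv           ⬚ Not installed  SPARV workflow (Specify→Pl...
#
# Commands:
#  i <num/name> - Install module(s)
#  u <num/name> - Uninstall module(s)
#  q - Quit
#
# Enter command: i sparv    ->  ✓ sparv installed (status file updated)
# Enter command: u sparv    ->  asks "Confirm? (y/N):" before removing files
# Enter command: q          ->  Goodbye!
```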
@@ -529,10 +860,54 @@ def main(argv: Optional[Iterable[str]] = None) -> int:
 
     ctx = resolve_paths(config, args)
 
+    # Handle --list-modules
     if getattr(args, "list_modules", False):
         list_modules(config)
         return 0
 
+    # Handle --status
+    if getattr(args, "status", False):
+        list_modules_with_status(config, ctx)
+        return 0
+
+    # Handle --uninstall
+    if getattr(args, "uninstall", False):
+        if not args.module:
+            print("Error: --uninstall requires --module to specify which modules to uninstall")
+            return 1
+        modules = config.get("modules", {})
+        installed = load_installed_status(ctx)
+        installed_modules = installed.get("modules", {})
+
+        selected = select_modules(config, args.module)
+        to_uninstall = {k: v for k, v in selected.items() if k in installed_modules}
+
+        if not to_uninstall:
+            print("None of the specified modules are installed.")
+            return 0
+
+        print(f"Uninstalling {len(to_uninstall)} module(s): {', '.join(to_uninstall.keys())}")
+        for name, cfg in to_uninstall.items():
+            try:
+                uninstall_module(name, cfg, ctx)
+                print(f" ✓ {name} uninstalled")
+            except Exception as exc:
+                print(f" ✗ {name} failed: {exc}", file=sys.stderr)
+
+        update_status_after_uninstall(list(to_uninstall.keys()), ctx)
+        print(f"\n✓ Uninstall complete")
+        return 0
+
+    # No --module specified: enter interactive management mode
+    if not args.module:
+        try:
+            ensure_install_dir(ctx["install_dir"])
+        except Exception as exc:
+            print(f"Failed to prepare install dir: {exc}", file=sys.stderr)
+            return 1
+        return interactive_manage(config, ctx)
+
+    # Install specified modules
     modules = select_modules(config, args.module)
 
     try:
@@ -568,7 +943,14 @@ def main(argv: Optional[Iterable[str]] = None) -> int:
             )
             break
 
-    write_status(results, ctx)
+    # Merge with existing status
+    current_status = load_installed_status(ctx)
+    for r in results:
+        if r.get("status") == "success":
+            current_status.setdefault("modules", {})[r["module"]] = r
+    current_status["updated_at"] = datetime.now().isoformat()
+    with Path(ctx["status_file"]).open("w", encoding="utf-8") as fh:
+        json.dump(current_status, fh, indent=2, ensure_ascii=False)
 
     # Summary
     success = sum(1 for r in results if r.get("status") == "success")
@@ -22,8 +22,8 @@ python3 install.py --module omo
 | librarian | External research | claude | claude-sonnet-4-5-20250929 |
 | explore | Codebase search | opencode | opencode/grok-code |
 | develop | Code implementation | codex | gpt-5.2 |
-| frontend-ui-ux-engineer | UI/UX specialist | gemini | gemini-3-pro-high |
-| document-writer | Documentation | gemini | gemini-3-flash |
+| frontend-ui-ux-engineer | UI/UX specialist | gemini | gemini-3-pro-preview |
+| document-writer | Documentation | gemini | gemini-3-flash-preview |
 
 ## How It Works
 
@@ -96,12 +96,12 @@ Agent-model mappings are configured in `~/.codeagent/models.json`:
   },
   "frontend-ui-ux-engineer": {
     "backend": "gemini",
-    "model": "gemini-3-pro-high",
+    "model": "gemini-3-pro-preview",
     "description": "Frontend engineer"
   },
   "document-writer": {
     "backend": "gemini",
-    "model": "gemini-3-flash",
+    "model": "gemini-3-flash-preview",
     "description": "Documentation"
   },
   "develop": {
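Because `~/.codeagent/models.json` is plain JSON, an existing install could be switched to the new preview model names with a one-off edit like the sketch below (assumes the file still carries the old model strings exactly as shown in the diff):

```bash
# Hedged sketch: update an existing ~/.codeagent/models.json in place (keeps a .bak copy)
sed -i.bak \
  -e 's/"gemini-3-pro-high"/"gemini-3-pro-preview"/' \
  -e 's/"gemini-3-flash"/"gemini-3-flash-preview"/' \
  ~/.codeagent/models.json
```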
skills/sparv/.claude-plugin/plugin.json (new file, 9 lines)

{
  "name": "sparv",
  "description": "Minimal SPARV workflow (Specify→Plan→Act→Review→Vault) with 10-point spec gate, unified journal, 2-action saves, 3-failure protocol, and EHRB risk detection.",
  "version": "1.1.0",
  "author": {
    "name": "cexll",
    "email": "cexll@cexll.com"
  }
}
skills/sparv/README.md (new file, 96 lines)

# SPARV - Unified Development Workflow (Simplified)

[]()
[]()

**SPARV** is an end-to-end development workflow: maximize delivery quality with minimal rules while avoiding "infinite iteration + self-rationalization."

```
S-Specify → P-Plan → A-Act → R-Review → V-Vault
Clarify     Plan     Execute  Review    Archive
```

## Key Changes (Over-engineering Removed)

- External memory merged from 3 files into 1 `.sparv/journal.md`
- Specify scoring simplified from 100-point to 10-point scale (threshold `>=9`)
- Reboot Test reduced from 5 questions to 3 questions
- Removed concurrency locks (Claude is single-threaded; locks only cause failures)

## Installation

SPARV is installed at `~/.claude/skills/sparv/`.

Install from ZIP:

```bash
unzip sparv.zip -d ~/.claude/skills/
```

## Quick Start

Run in project root:

```bash
~/.claude/skills/sparv/scripts/init-session.sh --force
```

Creates:

```
.sparv/
├── state.yaml
├── journal.md
└── history/
```

## External Memory System (Two Files)

- `state.yaml`: State (minimum fields: `session_id/current_phase/action_count/consecutive_failures`)
- `journal.md`: Unified log (Plan/Progress/Findings all go here)

After archiving:

```
.sparv/history/<session_id>/
├── state.yaml
└── journal.md
```

## Key Numbers

| Number | Meaning |
|--------|---------|
| **9/10** | Specify score passing threshold |
| **2** | Write to journal every 2 tool calls |
| **3** | Failure retry limit / Review fix limit |
| **3** | Reboot Test question count |
| **12** | Default max iterations (optional safety valve) |

## Script Tools

```bash
~/.claude/skills/sparv/scripts/init-session.sh --force
~/.claude/skills/sparv/scripts/save-progress.sh "Edit" "done"
~/.claude/skills/sparv/scripts/check-ehrb.sh --diff --fail-on-flags
~/.claude/skills/sparv/scripts/failure-tracker.sh fail --note "tests are flaky"
~/.claude/skills/sparv/scripts/reboot-test.sh --strict
~/.claude/skills/sparv/scripts/archive-session.sh
```

## Hooks

Hooks defined in `hooks/hooks.json`:

- PostToolUse: 2-Action auto-write to `journal.md`
- PreToolUse: EHRB risk prompt (default dry-run)
- Stop: 3-question reboot test (strict)

## References

- `SKILL.md`: Skill definition (for agent use)
- `references/methodology.md`: Methodology quick reference

---

*Quality over speed—iterate until truly complete.*
skills/sparv/SKILL.md (new file, 153 lines)

---
name: sparv
description: Minimal SPARV workflow (Specify→Plan→Act→Review→Vault) with 10-point spec gate, unified journal, 2-action saves, 3-failure protocol, and EHRB risk detection.
---

# SPARV

Five-phase workflow: **S**pecify → **P**lan → **A**ct → **R**eview → **V**ault.

Goal: Complete "requirements → verifiable delivery" in one pass, recording key decisions in external memory instead of relying on assumptions.

## Core Rules (Mandatory)

- **10-Point Specify Gate**: Spec score `0-10`; must be `>=9` to enter Plan.
- **2-Action Save**: Append an entry to `.sparv/journal.md` every 2 tool calls.
- **3-Failure Protocol**: Stop and escalate to user after 3 consecutive failures.
- **EHRB**: Require explicit user confirmation when high-risk detected (production/sensitive data/destructive/billing API/security-critical).
- **Fixed Phase Names**: `specify|plan|act|review|vault` (stored in `.sparv/state.yaml:current_phase`).

## Enhanced Rules (v1.1)

### Uncertainty Declaration (G3)

When any Specify dimension scores < 2:
- Declare: `UNCERTAIN: <what> | ASSUMPTION: <fallback>`
- List all assumptions in journal before Plan
- Offer 2-3 options for ambiguous requirements

Example:
```
UNCERTAIN: deployment target | ASSUMPTION: Docker container
UNCERTAIN: auth method | OPTIONS: JWT / OAuth2 / Session
```

### Requirement Routing

| Mode | Condition | Flow |
|------|-----------|------|
| **Quick** | score >= 9 AND <= 3 files AND no EHRB | Specify → Act → Review |
| **Full** | otherwise | Specify → Plan → Act → Review → Vault |

Quick mode skips formal Plan phase but still requires:
- Completion promise written to journal
- 2-action save rule applies
- Review phase mandatory

### Context Acquisition (Optional)

Before Specify scoring:
1. Check `.sparv/kb.md` for existing patterns/decisions
2. If insufficient, scan codebase for relevant files
3. Document findings in journal under `## Context`

Skip if user explicitly provides full context.

### Knowledge Base Maintenance

During Vault phase, update `.sparv/kb.md`:
- **Patterns**: Reusable code patterns discovered
- **Decisions**: Architectural choices + rationale
- **Gotchas**: Common pitfalls + solutions

### CHANGELOG Update

Use during Review or Vault phase for non-trivial changes:
```bash
~/.claude/skills/sparv/scripts/changelog-update.sh --type <Added|Changed|Fixed|Removed> --desc "..."
```

## External Memory (Two Files)

Initialize (run in project root):

```bash
~/.claude/skills/sparv/scripts/init-session.sh --force
```

File conventions:

- `.sparv/state.yaml`: State machine (minimum fields: `session_id/current_phase/action_count/consecutive_failures`)
- `.sparv/journal.md`: Unified log (Plan/Progress/Findings all go here)
- `.sparv/history/<session_id>/`: Archive directory

## Phase 1: Specify (10-Point Scale)

Each item scores 0/1/2, total 0-10:

1) **Value**: Why do it, are benefits/metrics verifiable
2) **Scope**: MVP + what's out of scope
3) **Acceptance**: Testable acceptance criteria
4) **Boundaries**: Error/performance/compatibility/security critical boundaries
5) **Risk**: EHRB/dependencies/unknowns + handling approach

`score < 9`: Keep asking questions; do not enter Plan.
`score >= 9`: Write a clear `completion_promise` (verifiable completion commitment), then enter Plan.

## Phase 2: Plan

- Break into atomic tasks (2-5 minute granularity), each with a verifiable output/test point.
- Write the plan to `.sparv/journal.md` (Plan section or append directly).

## Phase 3: Act

- **TDD Rule**: No failing test → no production code.
- Auto-write journal every 2 actions (PostToolUse hook).
- Failure counting (3-Failure Protocol):

```bash
~/.claude/skills/sparv/scripts/failure-tracker.sh fail --note "short blocker"
~/.claude/skills/sparv/scripts/failure-tracker.sh reset
```

## Phase 4: Review

- Two stages: Spec conformance → Code quality (correctness/performance/security/tests).
- Maximum 3 fix rounds; escalate to user if exceeded.

Run 3-question reboot test before session ends:

```bash
~/.claude/skills/sparv/scripts/reboot-test.sh --strict
```

## Phase 5: Vault

Archive current session:

```bash
~/.claude/skills/sparv/scripts/archive-session.sh
```

## Script Tools

| Script | Purpose |
|--------|---------|
| `scripts/init-session.sh` | Initialize `.sparv/`, generate `state.yaml` + `journal.md` |
| `scripts/save-progress.sh` | Maintain `action_count`, append to `journal.md` every 2 actions |
| `scripts/check-ehrb.sh` | Scan diff/text, output (optionally write) `ehrb_flags` |
| `scripts/failure-tracker.sh` | Maintain `consecutive_failures`, exit code 3 when reaching 3 |
| `scripts/reboot-test.sh` | 3-question self-check (optional strict mode) |
| `scripts/archive-session.sh` | Archive `journal.md` + `state.yaml` to `history/` |

## Auto Hooks

`hooks/hooks.json`:

- PostToolUse: `save-progress.sh` (2-Action save)
- PreToolUse: `check-ehrb.sh --diff --dry-run` (prompt only, no state write)
- Stop: `reboot-test.sh --strict` (3-question self-check)

---

*Quality over speed—iterate until truly complete.*
skills/sparv/hooks/hooks.json (new file, 37 lines)

{
  "description": "SPARV auto-hooks for 2-Action save, EHRB detection, and 3-Question reboot test",
  "hooks": {
    "PostToolUse": [
      {
        "matcher": "Edit|Write|Bash|Read|Glob|Grep",
        "hooks": [
          {
            "type": "command",
            "command": "[ -f .sparv/state.yaml ] && ${SKILL_PATH}/scripts/save-progress.sh \"${TOOL_NAME:-unknown}\" \"completed\" 2>/dev/null || true"
          }
        ]
      }
    ],
    "PreToolUse": [
      {
        "matcher": "Edit|Write",
        "hooks": [
          {
            "type": "command",
            "command": "[ -f .sparv/state.yaml ] && ${SKILL_PATH}/scripts/check-ehrb.sh --diff --dry-run 2>/dev/null || true"
          }
        ]
      }
    ],
    "Stop": [
      {
        "hooks": [
          {
            "type": "command",
            "command": "[ -f .sparv/state.yaml ] && ${SKILL_PATH}/scripts/reboot-test.sh --strict 2>/dev/null || true"
          }
        ]
      }
    ]
  }
}
skills/sparv/references/methodology.md (new file, 132 lines)

# SPARV Methodology (Short)

This document is a quick reference; the canonical spec is in `SKILL.md`.

## Five Phases

- **Specify**: Write requirements as verifiable specs (10-point gate)
- **Plan**: Break into atomic tasks (2-5 minute granularity)
- **Act**: TDD-driven implementation; write to journal every 2 actions
- **Review**: Spec conformance → Code quality; maximum 3 fix rounds
- **Vault**: Archive session (state + journal)

## Enhanced Rules (v1.1)

### Uncertainty Declaration (G3)

When any Specify dimension scores < 2:
- Declare: `UNCERTAIN: <what> | ASSUMPTION: <fallback>`
- List all assumptions in journal before Plan
- Offer 2-3 options for ambiguous requirements

### Requirement Routing

| Mode | Condition | Flow |
|------|-----------|------|
| **Quick** | score >= 9 AND <= 3 files AND no EHRB | Specify → Act → Review |
| **Full** | otherwise | Specify → Plan → Act → Review → Vault |

### Context Acquisition (Optional)

Before Specify scoring:
1. Check `.sparv/kb.md` for existing patterns/decisions
2. If insufficient, scan codebase for relevant files
3. Document findings in journal under `## Context`

### Knowledge Base Maintenance

During Vault phase, update `.sparv/kb.md`:
- **Patterns**: Reusable code patterns discovered
- **Decisions**: Architectural choices + rationale
- **Gotchas**: Common pitfalls + solutions

### CHANGELOG Update

```bash
~/.claude/skills/sparv/scripts/changelog-update.sh --type <Added|Changed|Fixed|Removed> --desc "..."
```

## Specify (10-Point Scale)

Each item scores 0/1/2, total 0-10; `>=9` required to enter Plan:

1) Value: Why do it, are benefits/metrics verifiable
2) Scope: MVP + what's out of scope
3) Acceptance: Testable acceptance criteria
4) Boundaries: Error/performance/compatibility/security critical boundaries
5) Risk: EHRB/dependencies/unknowns + handling approach

If below threshold, keep asking—don't "just start coding."

## Journal Convention (Unified Log)

All Plan/Progress/Findings go into `.sparv/journal.md`.

Recommended format (just append, no need to "insert into specific sections"):

```markdown
## 14:32 - Action #12
- Tool: Edit
- Result: Updated auth flow
- Next: Add test for invalid token
```

## 2-Action Save

Hook triggers `save-progress.sh` after each tool call; script only writes to journal when `action_count` is even.

## 3-Failure Protocol

When you fail consecutively, escalate by level:

1. Diagnose and fix (read errors, verify assumptions, minimal fix)
2. Alternative approach (change strategy/entry point)
3. Escalate (stop: document blocker + attempted solutions + request user decision)

Tools:

```bash
~/.claude/skills/sparv/scripts/failure-tracker.sh fail --note "short reason"
~/.claude/skills/sparv/scripts/failure-tracker.sh reset
```

## 3-Question Reboot Test

Self-check before session ends (or when lost):

1) Where am I? (current_phase)
2) Where am I going? (next_phase)
3) How do I prove completion? (completion_promise + evidence at journal end)

```bash
~/.claude/skills/sparv/scripts/reboot-test.sh --strict
```

## EHRB (High-Risk Changes)

Detection items (any match requires explicit user confirmation):

- Production access
- Sensitive data
- Destructive operations
- Billing external API
- Security-critical changes

```bash
~/.claude/skills/sparv/scripts/check-ehrb.sh --diff --fail-on-flags
```

## state.yaml (Minimal Schema)

Scripts only enforce 4 core fields; other fields are optional:

```yaml
session_id: "20260114-143022"
current_phase: "act"
action_count: 14
consecutive_failures: 0
max_iterations: 12
iteration_count: 0
completion_promise: "All acceptance criteria have tests and are green."
ehrb_flags: []
```
skills/sparv/scripts/archive-session.sh (new executable file, 95 lines)

#!/bin/bash
# SPARV Session Archive Script
# Archives completed session from .sparv/plan/<session_id>/ to .sparv/history/<session_id>/

set -e

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
source "$SCRIPT_DIR/lib/state-lock.sh"

usage() {
  cat <<'EOF'
Usage: archive-session.sh [--dry-run]

Moves current session from .sparv/plan/<session_id>/ to .sparv/history/<session_id>/
Updates .sparv/history/index.md with session info.

Options:
  --dry-run    Show what would be archived without doing it
EOF
}

SPARV_ROOT=".sparv"
PLAN_DIR="$SPARV_ROOT/plan"
HISTORY_DIR="$SPARV_ROOT/history"

dry_run=0

while [ $# -gt 0 ]; do
  case "$1" in
    -h|--help) usage; exit 0 ;;
    --dry-run) dry_run=1; shift ;;
    *) usage >&2; exit 1 ;;
  esac
done

# Find active session
find_active_session() {
  if [ -d "$PLAN_DIR" ]; then
    local session
    session="$(ls -1 "$PLAN_DIR" 2>/dev/null | head -1)"
    if [ -n "$session" ] && [ -f "$PLAN_DIR/$session/state.yaml" ]; then
      echo "$session"
    fi
  fi
}

# Update history/index.md
update_history_index() {
  local session_id="$1"
  local index_file="$HISTORY_DIR/index.md"
  local state_file="$HISTORY_DIR/$session_id/state.yaml"

  [ -f "$index_file" ] || return 0

  # Get feature name from state.yaml
  local fname=""
  if [ -f "$state_file" ]; then
    fname="$(grep -E '^feature_name:' "$state_file" | sed -E 's/^feature_name:[[:space:]]*"?([^"]*)"?$/\1/' || true)"
  fi
  [ -z "$fname" ] && fname="unnamed"

  local month="${session_id:0:6}"
  local formatted_month="${month:0:4}-${month:4:2}"

  # Add to monthly section if not exists
  if ! grep -q "### $formatted_month" "$index_file"; then
    echo -e "\n### $formatted_month\n" >> "$index_file"
  fi
  echo "- \`${session_id}\` - $fname" >> "$index_file"
}

SESSION_ID="$(find_active_session)"

if [ -z "$SESSION_ID" ]; then
  echo "No active session to archive"
  exit 0
fi

SRC_DIR="$PLAN_DIR/$SESSION_ID"
DST_DIR="$HISTORY_DIR/$SESSION_ID"

if [ "$dry_run" -eq 1 ]; then
  echo "Would archive: $SRC_DIR -> $DST_DIR"
  exit 0
fi

# Create history directory and move session
mkdir -p "$HISTORY_DIR"
mv "$SRC_DIR" "$DST_DIR"

# Update index
update_history_index "$SESSION_ID"

echo "✅ Session archived: $SESSION_ID"
echo "📁 Location: $DST_DIR"
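After archiving, `update_history_index` appends one bullet per session under a monthly heading. A hedged sketch of the resulting `.sparv/history/index.md` (the session id is the illustrative one from the methodology doc; `fname` falls back to "unnamed" when `state.yaml` carries no `feature_name`):

```bash
cat .sparv/history/index.md
# ### 2026-01
#
# - `20260114-143022` - unnamed
```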
skills/sparv/scripts/changelog-update.sh (new executable file, 112 lines)

#!/bin/bash
# SPARV Changelog Update Script
# Adds entries to .sparv/CHANGELOG.md under [Unreleased] section

set -e

usage() {
  cat <<'EOF'
Usage: changelog-update.sh --type <TYPE> --desc "description" [--file PATH]

Adds a changelog entry under [Unreleased] section.

Options:
  --type TYPE    Change type: Added|Changed|Fixed|Removed
  --desc DESC    Description of the change
  --file PATH    Custom changelog path (default: .sparv/CHANGELOG.md)

Examples:
  changelog-update.sh --type Added --desc "User authentication module"
  changelog-update.sh --type Fixed --desc "Login timeout issue"
EOF
}

CHANGELOG=".sparv/CHANGELOG.md"
TYPE=""
DESC=""

while [ $# -gt 0 ]; do
  case "$1" in
    -h|--help) usage; exit 0 ;;
    --type) TYPE="$2"; shift 2 ;;
    --desc) DESC="$2"; shift 2 ;;
    --file) CHANGELOG="$2"; shift 2 ;;
    *) usage >&2; exit 1 ;;
  esac
done

# Validate inputs
if [ -z "$TYPE" ] || [ -z "$DESC" ]; then
  echo "❌ Error: --type and --desc are required" >&2
  usage >&2
  exit 1
fi

# Validate type
case "$TYPE" in
  Added|Changed|Fixed|Removed) ;;
  *)
    echo "❌ Error: Invalid type '$TYPE'. Must be: Added|Changed|Fixed|Removed" >&2
    exit 1
    ;;
esac

# Check changelog exists
if [ ! -f "$CHANGELOG" ]; then
  echo "❌ Error: Changelog not found: $CHANGELOG" >&2
  echo " Run init-session.sh first to create it." >&2
  exit 1
fi

# Check if [Unreleased] section exists
if ! grep -q "## \[Unreleased\]" "$CHANGELOG"; then
  echo "❌ Error: [Unreleased] section not found in $CHANGELOG" >&2
  exit 1
fi

# Check if the type section already exists under [Unreleased]
# We need to insert after [Unreleased] but before the next ## section
TEMP_FILE=$(mktemp)
trap "rm -f $TEMP_FILE" EXIT

# Find if ### $TYPE exists between [Unreleased] and next ## section
IN_UNRELEASED=0
TYPE_FOUND=0
TYPE_LINE=0
UNRELEASED_LINE=0
NEXT_SECTION_LINE=0

line_num=0
while IFS= read -r line; do
  ((line_num++))
  if [[ "$line" =~ ^##[[:space:]]\[Unreleased\] ]]; then
    IN_UNRELEASED=1
    UNRELEASED_LINE=$line_num
  elif [[ $IN_UNRELEASED -eq 1 && "$line" =~ ^##[[:space:]] && ! "$line" =~ ^###[[:space:]] ]]; then
    NEXT_SECTION_LINE=$line_num
    break
  elif [[ $IN_UNRELEASED -eq 1 && "$line" =~ ^###[[:space:]]$TYPE ]]; then
    TYPE_FOUND=1
    TYPE_LINE=$line_num
  fi
done < "$CHANGELOG"

if [ $TYPE_FOUND -eq 1 ]; then
  # Append under existing ### $TYPE section
  awk -v type_line="$TYPE_LINE" -v desc="$DESC" '
    NR == type_line { print; getline; print; print "- " desc; next }
    { print }
  ' "$CHANGELOG" > "$TEMP_FILE"
else
  # Create new ### $TYPE section after [Unreleased]
  awk -v unreleased_line="$UNRELEASED_LINE" -v type="$TYPE" -v desc="$DESC" '
    NR == unreleased_line { print; print ""; print "### " type; print "- " desc; next }
    { print }
  ' "$CHANGELOG" > "$TEMP_FILE"
fi

mv "$TEMP_FILE" "$CHANGELOG"

echo "✅ Added to $CHANGELOG:"
echo " ### $TYPE"
echo " - $DESC"
skills/sparv/scripts/check-ehrb.sh
Executable file
182
skills/sparv/scripts/check-ehrb.sh
Executable file
@@ -0,0 +1,182 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
# EHRB Risk Detection Script
|
||||||
|
# Heuristically detects high-risk changes/specs and writes flags to .sparv/state.yaml:ehrb_flags.
|
||||||
|
|
||||||
|
set -euo pipefail
|
||||||
|
|
||||||
|
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||||
|
source "$SCRIPT_DIR/lib/state-lock.sh"
|
||||||
|
|
||||||
|
usage() {
|
||||||
|
cat <<'EOF'
|
||||||
|
Usage: check-ehrb.sh [options] [FILE...]
|
||||||
|
|
||||||
|
Options:
|
||||||
|
--diff Scan current git diff (staged + unstaged) and changed file names
|
||||||
|
--clear Clear ehrb_flags in .sparv/state.yaml (no scan needed)
|
||||||
|
--dry-run Do not write .sparv/state.yaml (print detected flags only)
|
||||||
|
--fail-on-flags Exit with code 2 if any flags are detected
|
||||||
|
-h, --help Show this help
|
||||||
|
|
||||||
|
Input:
|
||||||
|
- --diff
|
||||||
|
- positional FILE...
|
||||||
|
- stdin (if piped)
|
||||||
|
|
||||||
|
Examples:
|
||||||
|
check-ehrb.sh --diff --fail-on-flags
|
||||||
|
check-ehrb.sh docs/feature-prd.md
|
||||||
|
echo "touching production db" | check-ehrb.sh --fail-on-flags
|
||||||
|
EOF
|
||||||
|
}
|
||||||
|
|
||||||
|
die() {
|
||||||
|
echo "❌ $*" >&2
|
||||||
|
exit 1
|
||||||
|
}
|
||||||
|
|
||||||
|
is_piped_stdin() {
|
||||||
|
[ ! -t 0 ]
|
||||||
|
}
|
||||||
|
|
||||||
|
git_text() {
|
||||||
|
git diff --cached 2>/dev/null || true
|
||||||
|
git diff 2>/dev/null || true
|
||||||
|
(git diff --name-only --cached 2>/dev/null; git diff --name-only 2>/dev/null) | sort -u || true
|
||||||
|
}
|
||||||
|
|
||||||
|
render_inline_list() {
|
||||||
|
if [ "$#" -eq 0 ]; then
|
||||||
|
printf "[]"
|
||||||
|
return 0
|
||||||
|
fi
|
||||||
|
printf "["
|
||||||
|
local first=1 item
|
||||||
|
for item in "$@"; do
|
||||||
|
if [ "$first" -eq 1 ]; then
|
||||||
|
first=0
|
||||||
|
else
|
||||||
|
printf ", "
|
||||||
|
fi
|
||||||
|
printf "\"%s\"" "$item"
|
||||||
|
done
|
||||||
|
printf "]"
|
||||||
|
}
|
||||||
|
|
||||||
|
write_ehrb_flags() {
|
||||||
|
local list_value="$1"
|
||||||
|
sparv_require_state_file
|
||||||
|
sparv_state_validate_or_die
|
||||||
|
sparv_yaml_set_raw ehrb_flags "$list_value"
|
||||||
|
}
|
||||||
|
|
||||||
|
scan_diff=0
|
||||||
|
dry_run=0
|
||||||
|
clear=0
|
||||||
|
fail_on_flags=0
|
||||||
|
declare -a files=()
|
||||||
|
|
||||||
|
while [ $# -gt 0 ]; do
|
||||||
|
case "$1" in
|
||||||
|
-h|--help)
|
||||||
|
usage
|
||||||
|
exit 0
|
||||||
|
;;
|
||||||
|
--diff)
|
||||||
|
scan_diff=1
|
||||||
|
shift
|
||||||
|
;;
|
||||||
|
--clear)
|
||||||
|
clear=1
|
||||||
|
shift
|
||||||
|
;;
|
||||||
|
--dry-run)
|
||||||
|
dry_run=1
|
||||||
|
shift
|
||||||
|
;;
|
||||||
|
--fail-on-flags)
|
||||||
|
fail_on_flags=1
|
||||||
|
shift
|
||||||
|
;;
|
||||||
|
--)
|
||||||
|
shift
|
||||||
|
break
|
||||||
|
;;
|
||||||
|
-*)
|
||||||
|
die "Unknown argument: $1 (use --help for usage)"
|
||||||
|
;;
|
||||||
|
*)
|
||||||
|
files+=("$1")
|
||||||
|
shift
|
||||||
|
;;
|
||||||
|
esac
|
||||||
|
done
|
||||||
|
|
||||||
|
for path in "$@"; do
|
||||||
|
files+=("$path")
|
||||||
|
done
|
||||||
|
|
||||||
|
scan_text=""
|
||||||
|
|
||||||
|
if [ "$scan_diff" -eq 1 ]; then
|
||||||
|
if git rev-parse --is-inside-work-tree >/dev/null 2>&1; then
|
||||||
|
scan_text+=$'\n'"$(git_text)"
|
||||||
|
else
|
||||||
|
die "--diff requires running inside a git repository"
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [ "${#files[@]}" -gt 0 ]; then
|
||||||
|
for path in "${files[@]}"; do
|
||||||
|
[ -f "$path" ] || die "File not found: $path"
|
||||||
|
scan_text+=$'\n'"$(cat "$path")"
|
||||||
|
done
|
||||||
|
fi
|
||||||
|
|
||||||
|
if is_piped_stdin; then
|
||||||
|
scan_text+=$'\n'"$(cat)"
|
||||||
|
fi
|
||||||
|
|
||||||
|
declare -a flags=()
|
||||||
|
if [ "$clear" -eq 1 ]; then
|
||||||
|
flags=()
|
||||||
|
else
|
||||||
|
[ -n "$scan_text" ] || die "No scannable input (use --help to see input methods)"
|
||||||
|
|
||||||
|
if printf "%s" "$scan_text" | grep -Eiq '(^|[^a-z])(prod(uction)?|live)([^a-z]|$)|kubeconfig|kubectl|terraform|helm|eks|gke|aks'; then
|
||||||
|
flags+=("production-access")
|
||||||
|
fi
|
||||||
|
if printf "%s" "$scan_text" | grep -Eiq 'pii|phi|hipaa|ssn|password|passwd|secret|token|api[ _-]?key|private key|credit card|身份证|银行卡|医疗|患者'; then
|
||||||
|
flags+=("sensitive-data")
|
||||||
|
fi
|
||||||
|
if printf "%s" "$scan_text" | grep -Eiq 'rm[[:space:]]+-rf|drop[[:space:]]+table|delete[[:space:]]+from|truncate|terraform[[:space:]]+destroy|kubectl[[:space:]]+delete|drop[[:space:]]+database|wipe|purge'; then
|
||||||
|
flags+=("destructive-ops")
|
||||||
|
fi
|
||||||
|
if printf "%s" "$scan_text" | grep -Eiq 'stripe|paypal|billing|charge|invoice|subscription|metering|twilio|sendgrid|openai|anthropic|cost|usage'; then
|
||||||
|
flags+=("billing-external-api")
|
||||||
|
fi
|
||||||
|
if printf "%s" "$scan_text" | grep -Eiq 'auth|authentication|authorization|oauth|jwt|sso|encryption|crypto|tls|ssl|mfa|rbac|permission|权限|登录|认证'; then
|
||||||
|
flags+=("security-critical")
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [ "${#flags[@]}" -eq 0 ]; then
|
||||||
|
echo "EHRB: No risk flags detected"
|
||||||
|
else
|
||||||
|
echo "EHRB: Risk flags detected (require explicit user confirmation):"
|
||||||
|
for f in ${flags[@]+"${flags[@]}"}; do
|
||||||
|
echo " - $f"
|
||||||
|
done
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [ "$dry_run" -eq 0 ]; then
|
||||||
|
list_value="$(render_inline_list ${flags[@]+"${flags[@]}"})"
|
||||||
|
write_ehrb_flags "$list_value"
|
||||||
|
echo "Written to: $STATE_FILE (ehrb_flags: $list_value)"
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [ "$fail_on_flags" -eq 1 ] && [ "${#flags[@]}" -gt 0 ]; then
|
||||||
|
exit 2
|
||||||
|
fi
|
||||||
|
|
||||||
|
exit 0
|
||||||
135 skills/sparv/scripts/failure-tracker.sh (Executable file)
@@ -0,0 +1,135 @@
#!/bin/bash
# SPARV 3-Failure Protocol Tracker
# Maintains consecutive_failures and escalates when reaching 3.
# Notes are appended to journal.md (unified log).

set -euo pipefail

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
source "$SCRIPT_DIR/lib/state-lock.sh"

THRESHOLD=3

usage() {
  cat <<'EOF'
Usage: failure-tracker.sh <command> [options]

Commands:
  status              Show current consecutive_failures and protocol level
  fail [--note TEXT]  Increment consecutive_failures (exit 3 when reaching threshold)
  reset               Set consecutive_failures to 0

Auto-detects active session in .sparv/plan/<session_id>/
EOF
}

die() {
  echo "❌ $*" >&2
  exit 1
}

require_state() {
  # Auto-detect session (sets SPARV_DIR, STATE_FILE, JOURNAL_FILE)
  sparv_require_state_file
  sparv_state_validate_or_die
}

append_journal() {
  local level="$1"
  local note="${2:-}"
  local ts
  ts="$(date '+%Y-%m-%d %H:%M')"

  [ -f "$JOURNAL_FILE" ] || sparv_die "Cannot find $JOURNAL_FILE; run init-session.sh first"

  {
    echo
    echo "## Failure Protocol - $ts"
    echo "- level: $level"
    if [ -n "$note" ]; then
      echo "- note: $note"
    fi
  } >>"$JOURNAL_FILE"
}

protocol_level() {
  local count="$1"
  if [ "$count" -le 0 ]; then
    echo "0"
  elif [ "$count" -eq 1 ]; then
    echo "1"
  elif [ "$count" -eq 2 ]; then
    echo "2"
  else
    echo "3"
  fi
}

cmd="${1:-status}"
shift || true

note=""
case "$cmd" in
  -h|--help)
    usage
    exit 0
    ;;
  status)
    require_state
    current="$(sparv_yaml_get_int consecutive_failures 0)"
    level="$(protocol_level "$current")"
    echo "consecutive_failures: $current"
    case "$level" in
      0) echo "protocol: clean (no failures)" ;;
      1) echo "protocol: Attempt 1 - Diagnose and fix" ;;
      2) echo "protocol: Attempt 2 - Alternative approach" ;;
      3) echo "protocol: Attempt 3 - Escalate (pause, document, ask user)" ;;
    esac
    exit 0
    ;;
  fail)
    require_state
    if [ "${1:-}" = "--note" ]; then
      [ $# -ge 2 ] || die "--note requires an argument"
      note="$2"
      shift 2
    else
      note="$*"
      shift $#
    fi
    [ "$#" -eq 0 ] || die "Unknown argument: $1 (use --help for usage)"

    current="$(sparv_yaml_get_int consecutive_failures 0)"
    new_count=$((current + 1))
    sparv_yaml_set_int consecutive_failures "$new_count"

    level="$(protocol_level "$new_count")"
    case "$level" in
      1)
        echo "Attempt 1/3: Diagnose and fix"
        [ -n "$note" ] && append_journal "1" "$note"
        exit 0
        ;;
      2)
        echo "Attempt 2/3: Alternative approach"
        [ -n "$note" ] && append_journal "2" "$note"
        exit 0
        ;;
      3)
        echo "Attempt 3/3: Escalate"
        echo "3-Failure Protocol triggered: pause, document blocker and attempted solutions, request user decision."
        append_journal "3" "${note:-"(no note)"}"
        exit "$THRESHOLD"
        ;;
    esac
    ;;
  reset)
    require_state
    sparv_yaml_set_int consecutive_failures 0
    echo "consecutive_failures reset to 0"
    exit 0
    ;;
  *)
    die "Unknown command: $cmd (use --help for usage)"
    ;;
esac
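A short usage sketch of the tracker inside an active session; the messages follow the echo statements above, and the exit status is 0 for attempts 1-2 and 3 once the threshold is reached (note texts are illustrative):

./skills/sparv/scripts/failure-tracker.sh status
# consecutive_failures: 0
# protocol: clean (no failures)

./skills/sparv/scripts/failure-tracker.sh fail --note "unit tests failed on auth module"
# Attempt 1/3: Diagnose and fix

./skills/sparv/scripts/failure-tracker.sh fail --note "mocking approach also failed"
# Attempt 2/3: Alternative approach

./skills/sparv/scripts/failure-tracker.sh fail --note "still failing"
# Attempt 3/3: Escalate
# 3-Failure Protocol triggered: pause, document blocker and attempted solutions, request user decision.
# (exit status 3; journal.md gains a "## Failure Protocol" entry)

./skills/sparv/scripts/failure-tracker.sh reset
# consecutive_failures reset to 0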
235 skills/sparv/scripts/init-session.sh (Executable file)
@@ -0,0 +1,235 @@
#!/bin/bash
# SPARV Session Initialization
# Creates .sparv/plan/<session_id>/ with state.yaml and journal.md

set -e

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
source "$SCRIPT_DIR/lib/state-lock.sh"

usage() {
  cat <<'EOF'
Usage: init-session.sh [--force] [feature_name]

Creates .sparv/plan/<session_id>/ directory:
  - state.yaml (session state)
  - journal.md (unified log)

Also initializes:
  - .sparv/history/index.md (if not exists)
  - .sparv/CHANGELOG.md (if not exists)

Options:
  --force       Archive current session and start new one
  feature_name  Optional feature name for the session
EOF
}

SPARV_ROOT=".sparv"
PLAN_DIR="$SPARV_ROOT/plan"
HISTORY_DIR="$SPARV_ROOT/history"

force=0
feature_name=""

while [ $# -gt 0 ]; do
  case "$1" in
    -h|--help) usage; exit 0 ;;
    --force) force=1; shift ;;
    -*) usage >&2; exit 1 ;;
    *) feature_name="$1"; shift ;;
  esac
done

# Find current active session
find_active_session() {
  if [ -d "$PLAN_DIR" ]; then
    local session
    session="$(ls -1 "$PLAN_DIR" 2>/dev/null | head -1)"
    if [ -n "$session" ] && [ -f "$PLAN_DIR/$session/state.yaml" ]; then
      echo "$session"
    fi
  fi
}

# Archive a session to history
archive_session() {
  local session_id="$1"
  local src_dir="$PLAN_DIR/$session_id"
  local dst_dir="$HISTORY_DIR/$session_id"

  [ -d "$src_dir" ] || return 0

  mkdir -p "$HISTORY_DIR"
  mv "$src_dir" "$dst_dir"

  # Update index.md
  update_history_index "$session_id"

  echo "📦 Archived: $dst_dir"
}

# Update history/index.md
update_history_index() {
  local session_id="$1"
  local index_file="$HISTORY_DIR/index.md"
  local state_file="$HISTORY_DIR/$session_id/state.yaml"

  # Get feature name from state.yaml
  local fname=""
  if [ -f "$state_file" ]; then
    fname="$(grep -E '^feature_name:' "$state_file" | sed -E 's/^feature_name:[[:space:]]*"?([^"]*)"?$/\1/' || true)"
  fi
  [ -z "$fname" ] && fname="unnamed"

  local month="${session_id:0:6}"
  local formatted_month="${month:0:4}-${month:4:2}"
  local timestamp="${session_id:0:12}"

  # Append to index
  if [ -f "$index_file" ]; then
    # Add to monthly section if not exists
    if ! grep -q "### $formatted_month" "$index_file"; then
      echo -e "\n### $formatted_month\n" >> "$index_file"
    fi
    echo "- \`${session_id}\` - $fname" >> "$index_file"
  fi
}

# Initialize history/index.md if not exists
init_history_index() {
  local index_file="$HISTORY_DIR/index.md"
  [ -f "$index_file" ] && return 0

  mkdir -p "$HISTORY_DIR"
  cat > "$index_file" << 'EOF'
# History Index

This file records all completed sessions for traceability.

---

## Index

| Timestamp | Feature | Type | Status | Path |
|-----------|---------|------|--------|------|

---

## Monthly Archive

EOF
}

# Initialize CHANGELOG.md if not exists
init_changelog() {
  local changelog="$SPARV_ROOT/CHANGELOG.md"
  [ -f "$changelog" ] && return 0

  cat > "$changelog" << 'EOF'
# Changelog

All notable changes to this project will be documented in this file.
Format based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).

## [Unreleased]

EOF
}

# Initialize kb.md (knowledge base) if not exists
init_kb() {
  local kb_file="$SPARV_ROOT/kb.md"
  [ -f "$kb_file" ] && return 0

  cat > "$kb_file" << 'EOF'
# Knowledge Base

Cross-session knowledge accumulated during SPARV workflows.

---

## Patterns

<!-- Reusable code patterns discovered -->

## Decisions

<!-- Architectural choices + rationale -->
<!-- Format: - [YYYY-MM-DD]: decision | rationale -->

## Gotchas

<!-- Common pitfalls + solutions -->
<!-- Format: - [issue]: cause | solution -->

EOF
}

# Check for active session
active_session="$(find_active_session)"

if [ -n "$active_session" ]; then
  if [ "$force" -eq 0 ]; then
    echo "⚠️ Active session exists: $active_session"
    echo " Use --force to archive and start new session"
    echo " Or run: archive-session.sh"
    exit 0
  else
    archive_session "$active_session"
  fi
fi

# Generate new session ID
SESSION_ID=$(date +%Y%m%d%H%M%S)
SESSION_DIR="$PLAN_DIR/$SESSION_ID"

# Create directory structure
mkdir -p "$SESSION_DIR"
mkdir -p "$HISTORY_DIR"

# Initialize global files
init_history_index
init_changelog
init_kb

# Create state.yaml
cat > "$SESSION_DIR/state.yaml" << EOF
session_id: "$SESSION_ID"
feature_name: "$feature_name"
current_phase: "specify"
action_count: 0
consecutive_failures: 0
max_iterations: 12
iteration_count: 0
completion_promise: ""
ehrb_flags: []
EOF

# Create journal.md
cat > "$SESSION_DIR/journal.md" << EOF
# SPARV Journal
Session: $SESSION_ID
Feature: $feature_name
Created: $(date '+%Y-%m-%d %H:%M')

## Plan
<!-- Task breakdown, sub-issues, success criteria -->

## Progress
<!-- Auto-updated every 2 actions -->

## Findings
<!-- Learnings, patterns, discoveries -->
EOF

# Verify files created
if [ ! -f "$SESSION_DIR/state.yaml" ] || [ ! -f "$SESSION_DIR/journal.md" ]; then
  echo "❌ Failed to create files"
  exit 1
fi

echo "✅ SPARV session: $SESSION_ID"
[ -n "$feature_name" ] && echo "📝 Feature: $feature_name"
echo "📁 $SESSION_DIR/state.yaml"
echo "📁 $SESSION_DIR/journal.md"
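A sketch of a first run and the layout it produces; the session ID below is illustrative, everything else follows the echo statements and heredocs above:

./skills/sparv/scripts/init-session.sh "user-auth-endpoint"
# ✅ SPARV session: 20250115093042
# 📝 Feature: user-auth-endpoint
# 📁 .sparv/plan/20250115093042/state.yaml
# 📁 .sparv/plan/20250115093042/journal.md

# A second run without --force leaves the active session untouched; with --force the
# session directory is moved to .sparv/history/<session_id>/ and a line is appended
# to .sparv/history/index.md under the current month.
./skills/sparv/scripts/init-session.sh --force "next-feature"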
143 skills/sparv/scripts/lib/state-lock.sh (Executable file)
@@ -0,0 +1,143 @@
#!/bin/bash
#
# Shared helpers for .sparv state operations.
# Supports new directory structure: .sparv/plan/<session_id>/

sparv_die() {
  echo "❌ $*" >&2
  exit 1
}

# Find active session directory
sparv_find_active_session() {
  local plan_dir=".sparv/plan"
  if [ -d "$plan_dir" ]; then
    local session
    session="$(ls -1 "$plan_dir" 2>/dev/null | head -1)"
    if [ -n "$session" ] && [ -f "$plan_dir/$session/state.yaml" ]; then
      echo "$plan_dir/$session"
    fi
  fi
}

# Auto-detect SPARV_DIR and STATE_FILE
sparv_auto_detect() {
  local session_dir
  session_dir="$(sparv_find_active_session)"
  if [ -n "$session_dir" ]; then
    SPARV_DIR="$session_dir"
    STATE_FILE="$session_dir/state.yaml"
    JOURNAL_FILE="$session_dir/journal.md"
    export SPARV_DIR STATE_FILE JOURNAL_FILE
    return 0
  fi
  return 1
}

sparv_require_state_env() {
  if [ -z "${SPARV_DIR:-}" ] || [ -z "${STATE_FILE:-}" ]; then
    if ! sparv_auto_detect; then
      sparv_die "No active session found; run init-session.sh first"
    fi
  fi
}

sparv_require_state_file() {
  sparv_require_state_env
  [ -f "$STATE_FILE" ] || sparv_die "File not found: $STATE_FILE; run init-session.sh first"
}

# Read a YAML value (simple key: value format)
sparv_yaml_get() {
  local key="$1"
  local default="${2:-}"
  sparv_require_state_file

  local line value
  line="$(grep -E "^${key}:" "$STATE_FILE" | head -n 1 || true)"
  if [ -z "$line" ]; then
    printf "%s" "$default"
    return 0
  fi
  value="${line#${key}:}"
  value="$(printf "%s" "$value" | sed -E 's/^[[:space:]]+//; s/^"//; s/"$//')"
  printf "%s" "$value"
}

sparv_yaml_get_int() {
  local key="$1"
  local default="${2:-0}"
  local value
  value="$(sparv_yaml_get "$key" "$default")"
  if printf "%s" "$value" | grep -Eq '^[0-9]+$'; then
    printf "%s" "$value"
  else
    printf "%s" "$default"
  fi
}

# Write a YAML value (in-place update)
sparv_yaml_set_raw() {
  local key="$1"
  local raw_value="$2"
  sparv_require_state_file

  local tmp
  tmp="$(mktemp)"

  awk -v key="$key" -v repl="${key}: ${raw_value}" '
    BEGIN { in_block = 0; replaced = 0 }
    {
      if (in_block) {
        if ($0 ~ /^[[:space:]]*-/) next
        in_block = 0
      }
      if ($0 ~ ("^" key ":")) {
        print repl
        in_block = 1
        replaced = 1
        next
      }
      print
    }
    END {
      if (!replaced) print repl
    }
  ' "$STATE_FILE" >"$tmp"

  mv -f "$tmp" "$STATE_FILE"
}

sparv_yaml_set_int() {
  local key="$1"
  local value="$2"
  [ "$value" -ge 0 ] 2>/dev/null || sparv_die "$key must be a non-negative integer"
  sparv_yaml_set_raw "$key" "$value"
}

# Validate state.yaml has required fields (4 core fields only)
sparv_state_validate() {
  sparv_require_state_file

  local missing=0
  local key

  for key in session_id current_phase action_count consecutive_failures; do
    grep -Eq "^${key}:" "$STATE_FILE" || missing=1
  done

  local phase
  phase="$(sparv_yaml_get current_phase "")"
  case "$phase" in
    specify|plan|act|review|vault) ;;
    *) missing=1 ;;
  esac

  [ "$missing" -eq 0 ]
}

sparv_state_validate_or_die() {
  if ! sparv_state_validate; then
    sparv_die "Corrupted state.yaml: $STATE_FILE. Run init-session.sh --force to rebuild."
  fi
}
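Downstream scripts consume this library roughly as follows; a minimal sketch, with the iteration_count write-back purely illustrative:

#!/bin/bash
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
source "$SCRIPT_DIR/lib/state-lock.sh"

sparv_require_state_file        # auto-detects .sparv/plan/<session_id>/ and sets STATE_FILE, JOURNAL_FILE
sparv_state_validate_or_die     # refuses to run against a corrupted state.yaml

phase="$(sparv_yaml_get current_phase "specify")"
actions="$(sparv_yaml_get_int action_count 0)"
echo "phase=$phase actions=$actions"

sparv_yaml_set_int iteration_count "$((actions / 2))"   # hypothetical write-back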
127 skills/sparv/scripts/reboot-test.sh (Executable file)
@@ -0,0 +1,127 @@
#!/bin/bash
# SPARV 3-Question Reboot Test Script
# Prints (and optionally validates) the "3 questions" using the current session state.

set -euo pipefail

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
source "$SCRIPT_DIR/lib/state-lock.sh"

usage() {
  cat <<'EOF'
Usage: reboot-test.sh [options]

Options:
  --strict     Exit non-zero if critical answers are missing or unsafe
  -h, --help   Show this help

Auto-detects active session in .sparv/plan/<session_id>/
EOF
}

die() {
  echo "❌ $*" >&2
  exit 1
}

tail_file() {
  local path="$1"
  local lines="${2:-20}"
  if [ -f "$path" ]; then
    tail -n "$lines" "$path"
  else
    echo "(missing: $path)"
  fi
}

strict=0

while [ $# -gt 0 ]; do
  case "$1" in
    -h|--help) usage; exit 0 ;;
    --strict) strict=1; shift ;;
    *) die "Unknown argument: $1 (use --help for usage)" ;;
  esac
done

# Auto-detect session (sets SPARV_DIR, STATE_FILE, JOURNAL_FILE)
sparv_require_state_file
sparv_state_validate_or_die

session_id="$(sparv_yaml_get session_id "")"
feature_name="$(sparv_yaml_get feature_name "")"
current_phase="$(sparv_yaml_get current_phase "")"
completion_promise="$(sparv_yaml_get completion_promise "")"
iteration_count="$(sparv_yaml_get_int iteration_count 0)"
max_iterations="$(sparv_yaml_get_int max_iterations 0)"
consecutive_failures="$(sparv_yaml_get_int consecutive_failures 0)"
ehrb_flags="$(sparv_yaml_get ehrb_flags "")"

case "$current_phase" in
  specify) next_phase="plan" ;;
  plan) next_phase="act" ;;
  act) next_phase="review" ;;
  review) next_phase="vault" ;;
  vault) next_phase="done" ;;
  *) next_phase="unknown" ;;
esac

echo "== 3-Question Reboot Test =="
echo "session_id: ${session_id:-"(unknown)"}"
if [ -n "$feature_name" ]; then
  echo "feature_name: $feature_name"
fi
echo
echo "1) Where am I?"
echo " current_phase: ${current_phase:-"(empty)"}"
echo
echo "2) Where am I going?"
echo " next_phase: $next_phase"
echo
echo "3) How do I prove completion?"
if [ -n "$completion_promise" ]; then
  echo " completion_promise: $completion_promise"
else
  echo " completion_promise: (empty)"
fi
echo
echo "journal tail (20 lines):"
tail_file "$JOURNAL_FILE" 20
echo
echo "Counters: failures=$consecutive_failures, iteration=$iteration_count/$max_iterations"
if [ -n "$ehrb_flags" ] && [ "$ehrb_flags" != "[]" ]; then
  echo "EHRB: $ehrb_flags"
fi

if [ "$strict" -eq 1 ]; then
  exit_code=0

  case "$current_phase" in
    specify|plan|act|review|vault) ;;
    *) echo "❌ strict: current_phase invalid/empty: $current_phase" >&2; exit_code=1 ;;
  esac

  if [ -z "$completion_promise" ]; then
    echo "❌ strict: completion_promise is empty; fill in a verifiable completion commitment in $STATE_FILE first." >&2
    exit_code=1
  fi

  if [ "$max_iterations" -gt 0 ] && [ "$iteration_count" -ge "$max_iterations" ]; then
    echo "❌ strict: iteration_count >= max_iterations; stop hook triggered, should pause and escalate to user." >&2
    exit_code=1
  fi

  if [ "$consecutive_failures" -ge 3 ]; then
    echo "❌ strict: consecutive_failures >= 3; 3-Failure Protocol triggered, should pause and escalate to user." >&2
    exit_code=1
  fi

  if [ -n "$ehrb_flags" ] && [ "$ehrb_flags" != "[]" ]; then
    echo "❌ strict: ehrb_flags not empty; EHRB risk exists, requires explicit user confirmation before continuing." >&2
    exit_code=1
  fi

  exit "$exit_code"
fi

exit 0
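Typical output, condensed, followed by the strict-mode gate; the state values shown are illustrative:

./skills/sparv/scripts/reboot-test.sh
# == 3-Question Reboot Test ==
# session_id: 20250115093042
# 1) Where am I?                 current_phase: act
# 2) Where am I going?           next_phase: review
# 3) How do I prove completion?  completion_promise: all unit tests pass and /login returns 200
# ...journal tail, counters, EHRB flags...

# --strict turns the same checks into an exit code, so a wrapper can halt the workflow:
./skills/sparv/scripts/reboot-test.sh --strict || echo "pause and escalate to the user"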
55 skills/sparv/scripts/save-progress.sh (Executable file)
@@ -0,0 +1,55 @@
#!/bin/bash
# SPARV Progress Save Script
# Implements the 2-Action rule (called after each tool call; writes every 2 actions).

set -euo pipefail

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
source "$SCRIPT_DIR/lib/state-lock.sh"

usage() {
  cat <<'EOF'
Usage: save-progress.sh [TOOL_NAME] [RESULT]

Increments action_count and appends to journal.md every 2 actions.
Auto-detects active session in .sparv/plan/<session_id>/
EOF
}

if [ "${1:-}" = "-h" ] || [ "${1:-}" = "--help" ]; then
  usage
  exit 0
fi

# Auto-detect session (sets SPARV_DIR, STATE_FILE, JOURNAL_FILE)
sparv_require_state_file
sparv_state_validate_or_die
[ -f "$JOURNAL_FILE" ] || sparv_die "Cannot find $JOURNAL_FILE; run init-session.sh first"

# Arguments
TOOL_NAME="${1:-unknown}"
RESULT="${2:-no result}"

ACTION_COUNT="$(sparv_yaml_get_int action_count 0)"

# Increment action count
NEW_COUNT=$((ACTION_COUNT + 1))

# Update state file
sparv_yaml_set_int action_count "$NEW_COUNT"

# Only write every 2 actions
if [ $((NEW_COUNT % 2)) -ne 0 ]; then
  exit 0
fi

# Append to journal
TIMESTAMP=$(date '+%H:%M')
cat >> "$JOURNAL_FILE" << EOF

## $TIMESTAMP - Action #$NEW_COUNT
- Tool: $TOOL_NAME
- Result: $RESULT
EOF

echo "📝 journal.md saved: Action #$NEW_COUNT"
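A sketch of the 2-action cadence; tool names, result strings, and the timestamp are illustrative placeholders passed straight through as $1 and $2:

./skills/sparv/scripts/save-progress.sh "Read" "scanned auth middleware"   # action #1: counter only, no journal write
./skills/sparv/scripts/save-progress.sh "Edit" "added JWT expiry check"    # action #2: flushes a journal entry
# 📝 journal.md saved: Action #2

# journal.md now ends with an entry like:
# ## 14:32 - Action #2
# - Tool: Edit
# - Result: added JWT expiry check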