From 1451594ae6738b943ef559e28fc6dfd69e810fee Mon Sep 17 00:00:00 2001
From: catlog22
Date: Mon, 5 Jan 2026 23:58:23 +0800
Subject: [PATCH] feat: Add user action prompt after issue discovery and enhance environment variable support for embedding and reranker configurations

---
 .claude/commands/issue/discover.md       | 41 +++++++++++++++++
 codex-lens/src/codexlens/cli/commands.py | 56 ++++++++++++++++++++----
 codex-lens/src/codexlens/config.py       | 56 +++++++++++++++++++++++-
 codex-lens/src/codexlens/env_config.py   | 11 +++--
 4 files changed, 149 insertions(+), 15 deletions(-)

diff --git a/.claude/commands/issue/discover.md b/.claude/commands/issue/discover.md
index b66f127b..1dfc07b6 100644
--- a/.claude/commands/issue/discover.md
+++ b/.claude/commands/issue/discover.md
@@ -73,6 +73,9 @@ Phase 5: Issue Generation & Summary
 ├─ Write to discovery-issues.jsonl
 ├─ Generate single summary.md from agent returns
 └─ Update discovery-state.json to complete
+
+Phase 6: User Action Prompt
+  └─ AskUserQuestion for next step (export/dashboard/skip)
 ```
 
 ## Perspectives
@@ -238,6 +241,44 @@ await updateDiscoveryState(outputDir, {
 });
 ```
 
+**Phase 6: User Action Prompt**
+
+```javascript
+// Prompt user for next action based on discovery results
+const hasHighPriority = issues.some(i => i.priority === 'critical' || i.priority === 'high');
+const hasMediumFindings = prioritizedFindings.some(f => f.priority === 'medium');
+
+const response = await AskUserQuestion({
+  questions: [{
+    question: `Discovery complete: ${issues.length} issues generated, ${prioritizedFindings.length} total findings. What would you like to do next?`,
+    header: "Next Step",
+    multiSelect: false,
+    options: hasHighPriority ? [
+      { label: "Export to Issues (Recommended)", description: `${issues.length} high-priority issues found - export to issue tracker for planning` },
+      { label: "Open Dashboard", description: "Review findings in ccw view before exporting" },
+      { label: "Skip", description: "Complete discovery without exporting" }
+    ] : hasMediumFindings ? [
+      { label: "Open Dashboard (Recommended)", description: "Review medium-priority findings in ccw view to decide which to export" },
+      { label: "Export to Issues", description: `Export ${issues.length} issues to tracker` },
+      { label: "Skip", description: "Complete discovery without exporting" }
+    ] : [
+      { label: "Skip (Recommended)", description: "No significant issues found - complete discovery" },
+      { label: "Open Dashboard", description: "Review all findings in ccw view" },
+      { label: "Export to Issues", description: `Export ${issues.length} issues anyway` }
+    ]
+  }]
+});
+
+// Handle response
+if (response === "Export to Issues") {
+  // Append to issues.jsonl
+  await appendJsonl('.workflow/issues/issues.jsonl', issues);
+  console.log(`Exported ${issues.length} issues. Run /issue:plan to continue.`);
+} else if (response === "Open Dashboard") {
+  console.log('Run `ccw view` and navigate to Issues > Discovery to manage findings.');
+}
+```
+
 ### Output File Structure
 
 ```
diff --git a/codex-lens/src/codexlens/cli/commands.py b/codex-lens/src/codexlens/cli/commands.py
index 3aeb7729..a121e8f4 100644
--- a/codex-lens/src/codexlens/cli/commands.py
+++ b/codex-lens/src/codexlens/cli/commands.py
@@ -1220,9 +1220,32 @@ def config(
         except (json.JSONDecodeError, OSError):
             pass  # Settings file not readable, continue with defaults
 
-        # Environment variables override settings file
-        if os.getenv("RERANKER_PROVIDER"):
-            result["reranker_api_provider"] = os.getenv("RERANKER_PROVIDER")
+        # Load .env overrides from global ~/.codexlens/.env
+        env_overrides: Dict[str, str] = {}
+        try:
+            from codexlens.env_config import load_global_env
+            env_overrides = load_global_env()
+        except ImportError:
+            pass
+
+        # Apply .env overrides (highest priority) and track them
+        if env_overrides.get("EMBEDDING_MODEL"):
+            result["embedding_model"] = env_overrides["EMBEDDING_MODEL"]
+            result["embedding_model_source"] = ".env"
+        if env_overrides.get("EMBEDDING_BACKEND"):
+            result["embedding_backend"] = env_overrides["EMBEDDING_BACKEND"]
+            result["embedding_backend_source"] = ".env"
+        if env_overrides.get("RERANKER_MODEL"):
+            result["reranker_model"] = env_overrides["RERANKER_MODEL"]
+            result["reranker_model_source"] = ".env"
+        if env_overrides.get("RERANKER_BACKEND"):
+            result["reranker_backend"] = env_overrides["RERANKER_BACKEND"]
+            result["reranker_backend_source"] = ".env"
+        if env_overrides.get("RERANKER_ENABLED"):
+            result["reranker_enabled"] = env_overrides["RERANKER_ENABLED"].lower() in ("true", "1", "yes", "on")
+            result["reranker_enabled_source"] = ".env"
+        if env_overrides.get("RERANKER_PROVIDER") or os.getenv("RERANKER_PROVIDER"):
+            result["reranker_api_provider"] = env_overrides.get("RERANKER_PROVIDER") or os.getenv("RERANKER_PROVIDER")
 
         if json_mode:
             print_json(success=True, result=result)
@@ -1232,12 +1255,27 @@
             console.print(f"  Index Directory: {result['index_dir']}")
             if result['env_override']:
                 console.print(f"  [dim](Override via CODEXLENS_INDEX_DIR)[/dim]")
-            # Show reranker settings if present
-            if result.get("reranker_backend"):
-                console.print(f"\n[bold]Reranker[/bold]")
-                console.print(f"  Backend: {result.get('reranker_backend', 'N/A')}")
-                console.print(f"  Model: {result.get('reranker_model', 'N/A')}")
-                console.print(f"  Enabled: {result.get('reranker_enabled', False)}")
+
+            # Show embedding settings
+            console.print(f"\n[bold]Embedding[/bold]")
+            backend = result.get('embedding_backend', 'fastembed')
+            backend_source = result.get('embedding_backend_source', 'settings.json')
+            console.print(f"  Backend: {backend} [dim]({backend_source})[/dim]")
+            model = result.get('embedding_model', 'code')
+            model_source = result.get('embedding_model_source', 'settings.json')
+            console.print(f"  Model: {model} [dim]({model_source})[/dim]")
+
+            # Show reranker settings
+            console.print(f"\n[bold]Reranker[/bold]")
+            backend = result.get('reranker_backend', 'fastembed')
+            backend_source = result.get('reranker_backend_source', 'settings.json')
+            console.print(f"  Backend: {backend} [dim]({backend_source})[/dim]")
+            model = result.get('reranker_model', 'N/A')
+            model_source = result.get('reranker_model_source', 'settings.json')
+            console.print(f"  Model: {model} [dim]({model_source})[/dim]")
+            enabled = result.get('reranker_enabled', False)
+            enabled_source = result.get('reranker_enabled_source', 'settings.json')
+            console.print(f"  Enabled: {enabled} [dim]({enabled_source})[/dim]")
 
     elif action == "set":
         if not key:
diff --git a/codex-lens/src/codexlens/config.py b/codex-lens/src/codexlens/config.py
index c0a3f929..dd59ef06 100644
--- a/codex-lens/src/codexlens/config.py
+++ b/codex-lens/src/codexlens/config.py
@@ -339,11 +339,11 @@ class Config:
                 self.enable_cross_encoder_rerank = reranker["enabled"]
             if "backend" in reranker:
                 backend = reranker["backend"]
-                if backend in {"onnx", "api", "litellm", "legacy"}:
+                if backend in {"fastembed", "onnx", "api", "litellm", "legacy"}:
                     self.reranker_backend = backend
                 else:
                     log.warning(
-                        "Invalid reranker backend in %s: %r (expected 'onnx', 'api', 'litellm', or 'legacy')",
+                        "Invalid reranker backend in %s: %r (expected 'fastembed', 'onnx', 'api', 'litellm', or 'legacy')",
                         self.settings_path,
                         backend,
                     )
@@ -383,6 +383,58 @@ class Config:
                 exc,
             )
 
+        # Apply .env overrides (highest priority)
+        self._apply_env_overrides()
+
+    def _apply_env_overrides(self) -> None:
+        """Apply environment variable overrides from .env file.
+
+        Priority: default → settings.json → .env (highest)
+
+        Supported variables:
+            EMBEDDING_MODEL: Override embedding model/profile
+            EMBEDDING_BACKEND: Override embedding backend (fastembed/litellm)
+            RERANKER_MODEL: Override reranker model
+            RERANKER_BACKEND: Override reranker backend
+            RERANKER_ENABLED: Override reranker enabled state (true/false)
+        """
+        from .env_config import load_global_env
+
+        env_vars = load_global_env()
+        if not env_vars:
+            return
+
+        # Embedding overrides
+        if "EMBEDDING_MODEL" in env_vars:
+            self.embedding_model = env_vars["EMBEDDING_MODEL"]
+            log.debug("Overriding embedding_model from .env: %s", self.embedding_model)
+
+        if "EMBEDDING_BACKEND" in env_vars:
+            backend = env_vars["EMBEDDING_BACKEND"].lower()
+            if backend in {"fastembed", "litellm"}:
+                self.embedding_backend = backend
+                log.debug("Overriding embedding_backend from .env: %s", backend)
+            else:
+                log.warning("Invalid EMBEDDING_BACKEND in .env: %r", backend)
+
+        # Reranker overrides
+        if "RERANKER_MODEL" in env_vars:
+            self.reranker_model = env_vars["RERANKER_MODEL"]
+            log.debug("Overriding reranker_model from .env: %s", self.reranker_model)
+
+        if "RERANKER_BACKEND" in env_vars:
+            backend = env_vars["RERANKER_BACKEND"].lower()
+            if backend in {"fastembed", "onnx", "api", "litellm", "legacy"}:
+                self.reranker_backend = backend
+                log.debug("Overriding reranker_backend from .env: %s", backend)
+            else:
+                log.warning("Invalid RERANKER_BACKEND in .env: %r", backend)
+
+        if "RERANKER_ENABLED" in env_vars:
+            value = env_vars["RERANKER_ENABLED"].lower()
+            self.enable_cross_encoder_rerank = value in {"true", "1", "yes", "on"}
+            log.debug("Overriding reranker_enabled from .env: %s", self.enable_cross_encoder_rerank)
+
     @classmethod
     def load(cls) -> "Config":
         """Load config with settings from file."""
diff --git a/codex-lens/src/codexlens/env_config.py b/codex-lens/src/codexlens/env_config.py
index 156378f5..992a04dd 100644
--- a/codex-lens/src/codexlens/env_config.py
+++ b/codex-lens/src/codexlens/env_config.py
@@ -20,15 +20,18 @@ log = logging.getLogger(__name__)
 
 # Supported environment variables with descriptions
 ENV_VARS = {
-    # Reranker API configuration
+    # Reranker configuration (overrides settings.json)
+    "RERANKER_MODEL": "Reranker model name (overrides settings.json)",
+    "RERANKER_BACKEND": "Reranker backend: fastembed, onnx, api, litellm, legacy",
+    "RERANKER_ENABLED": "Enable reranker: true/false",
"RERANKER_API_KEY": "API key for reranker service (SiliconFlow/Cohere/Jina)", "RERANKER_API_BASE": "Base URL for reranker API (overrides provider default)", "RERANKER_PROVIDER": "Reranker provider: siliconflow, cohere, jina", - "RERANKER_MODEL": "Reranker model name", - # Embedding API configuration + # Embedding configuration (overrides settings.json) + "EMBEDDING_MODEL": "Embedding model/profile name (overrides settings.json)", + "EMBEDDING_BACKEND": "Embedding backend: fastembed, litellm", "EMBEDDING_API_KEY": "API key for embedding service", "EMBEDDING_API_BASE": "Base URL for embedding API", - "EMBEDDING_MODEL": "Embedding model name", # LiteLLM configuration "LITELLM_API_KEY": "API key for LiteLLM", "LITELLM_API_BASE": "Base URL for LiteLLM",