Enhance UX and Coordinator Role Constraints in Skills Documentation

- Added detailed constraints for the Coordinator role in the team UX improvement skill, emphasizing orchestration responsibilities and workflow management.
- Updated test cases in DashboardToolbar, useIssues, and useWebSocket to improve reliability and clarity.
- Introduced new tests for configStore and ignore patterns in Codex Lens, verifying settings-file loading and directory-exclusion behavior during indexing.
- Enhanced smart search functionality with improved embedding selection logic and added tests for various scenarios.
- Updated installation and usage documentation to reflect changes in directory structure and role specifications.
This commit is contained in:
catlog22
2026-03-08 23:43:44 +08:00
parent f3ae78f95e
commit 61ea9d47a6
110 changed files with 1516 additions and 218 deletions

View File

@@ -114,6 +114,8 @@ class Config:
# Indexing/search optimizations
global_symbol_index_enabled: bool = True # Enable project-wide symbol index fast path
enable_merkle_detection: bool = True # Enable content-hash based incremental indexing
ignore_patterns: List[str] = field(default_factory=list) # Additional directory ignore patterns for indexing
extension_filters: List[str] = field(default_factory=list) # Reserved for file-level filtering config
# Graph expansion (search-time, uses precomputed neighbors)
enable_graph_expansion: bool = False
@@ -342,6 +344,8 @@ class Config:
"batch_size_max": self.api_batch_size_max,
"chars_per_token_estimate": self.chars_per_token_estimate,
},
"ignore_patterns": self.ignore_patterns,
"extension_filters": self.extension_filters,
}
with open(self.settings_path, "w", encoding="utf-8") as f:
json.dump(settings, f, indent=2)
@@ -638,6 +642,34 @@ class Config:
raw_types,
)
raw_ignore_patterns = settings.get("ignore_patterns")
if raw_ignore_patterns is not None:
if isinstance(raw_ignore_patterns, list):
self.ignore_patterns = [
str(item).strip() for item in raw_ignore_patterns
if str(item).strip()
]
else:
log.warning(
"Invalid ignore_patterns in %s: %r (expected list)",
self.settings_path,
raw_ignore_patterns,
)
raw_extension_filters = settings.get("extension_filters")
if raw_extension_filters is not None:
if isinstance(raw_extension_filters, list):
self.extension_filters = [
str(item).strip() for item in raw_extension_filters
if str(item).strip()
]
else:
log.warning(
"Invalid extension_filters in %s: %r (expected list)",
self.settings_path,
raw_extension_filters,
)
# Load API settings
api = settings.get("api", {})
if "max_workers" in api:

View File

@@ -6,6 +6,7 @@ Each directory maintains its own _index.db with files and subdirectory links.
from __future__ import annotations
import fnmatch
import logging
import os
import re
@@ -24,6 +25,46 @@ from codexlens.storage.path_mapper import PathMapper
from codexlens.storage.registry import ProjectInfo, RegistryStore
# Directory names excluded from indexing by default, grouped by origin.
# Entries are plain names (no globs); matching helpers compare basenames
# against this set when no patterns are configured.
DEFAULT_IGNORE_DIRS: Set[str] = {
    # Version-control metadata
    ".git", ".svn", ".hg",
    # Python virtual environments
    ".venv", "venv", "env",
    # JavaScript dependency directories
    "node_modules", "bower_components",
    # Tool caches
    "__pycache__", ".pytest_cache", ".mypy_cache", ".ruff_cache",
    ".npm", ".yarn", ".cache", ".parcel-cache", ".turbo",
    # CodexLens' own data directory
    ".codexlens",
    # IDE / editor settings
    ".idea", ".vscode", ".vs", ".eclipse",
    # Build outputs and framework artifacts
    "dist", "build", "out", "target", "bin", "obj", "_build",
    ".next", ".nuxt",
    # Coverage reports
    "coverage", "htmlcov",
    # Scratch and log directories
    "logs", "tmp", "temp",
}
@dataclass
class BuildResult:
"""Complete build operation result."""
@@ -67,16 +108,7 @@ class IndexTreeBuilder:
"""
# Directories to skip during indexing
IGNORE_DIRS: Set[str] = {
".git",
".venv",
"venv",
"node_modules",
"__pycache__",
".codexlens",
".idea",
".vscode",
}
IGNORE_DIRS: Set[str] = DEFAULT_IGNORE_DIRS
def __init__(
self, registry: RegistryStore, mapper: PathMapper, config: Config = None, incremental: bool = True
@@ -95,6 +127,37 @@ class IndexTreeBuilder:
self.parser_factory = ParserFactory(self.config)
self.logger = logging.getLogger(__name__)
self.incremental = incremental
self.ignore_patterns = self._resolve_ignore_patterns()
def _resolve_ignore_patterns(self) -> Tuple[str, ...]:
configured_patterns = getattr(self.config, "ignore_patterns", None)
raw_patterns = configured_patterns if configured_patterns else list(DEFAULT_IGNORE_DIRS)
cleaned: List[str] = []
for item in raw_patterns:
pattern = str(item).strip().replace('\\', '/').rstrip('/')
if pattern:
cleaned.append(pattern)
return tuple(dict.fromkeys(cleaned))
def _is_ignored_dir(self, dir_path: Path, source_root: Optional[Path] = None) -> bool:
name = dir_path.name
if name.startswith('.'):
return True
rel_path: Optional[str] = None
if source_root is not None:
try:
rel_path = dir_path.relative_to(source_root).as_posix()
except ValueError:
rel_path = None
for pattern in self.ignore_patterns:
if pattern == name or fnmatch.fnmatch(name, pattern):
return True
if rel_path and (pattern == rel_path or fnmatch.fnmatch(rel_path, pattern)):
return True
return False
def build(
self,
@@ -377,10 +440,11 @@ class IndexTreeBuilder:
for root, dirnames, _ in os.walk(source_root):
# Filter out ignored directories
root_path = Path(root)
dirnames[:] = [
d
for d in dirnames
if d not in self.IGNORE_DIRS and not d.startswith(".")
if not self._is_ignored_dir(root_path / d, source_root)
]
root_path = Path(root)
@@ -390,7 +454,7 @@ class IndexTreeBuilder:
continue
# Check if this directory should be indexed
if not self._should_index_dir(root_path, languages):
if not self._should_index_dir(root_path, languages, source_root=source_root):
continue
# Calculate depth relative to source_root
@@ -406,7 +470,7 @@ class IndexTreeBuilder:
return dirs_by_depth
def _should_index_dir(self, dir_path: Path, languages: List[str] = None) -> bool:
def _should_index_dir(self, dir_path: Path, languages: List[str] = None, source_root: Optional[Path] = None) -> bool:
"""Check if directory should be indexed.
A directory is indexed if:
@@ -423,7 +487,7 @@ class IndexTreeBuilder:
True if directory should be indexed
"""
# Check directory name
if dir_path.name in self.IGNORE_DIRS or dir_path.name.startswith("."):
if self._is_ignored_dir(dir_path, source_root):
return False
# Check for supported files in this directory
@@ -436,15 +500,15 @@ class IndexTreeBuilder:
for item in dir_path.iterdir():
if not item.is_dir():
continue
if item.name in self.IGNORE_DIRS or item.name.startswith("."):
if self._is_ignored_dir(item, source_root):
continue
# Recursively check subdirectories
if self._has_indexable_files_recursive(item, languages):
if self._has_indexable_files_recursive(item, languages, source_root=source_root):
return True
return False
def _has_indexable_files_recursive(self, dir_path: Path, languages: List[str] = None) -> bool:
def _has_indexable_files_recursive(self, dir_path: Path, languages: List[str] = None, source_root: Optional[Path] = None) -> bool:
"""Check if directory or any subdirectory has indexable files.
Args:
@@ -464,9 +528,9 @@ class IndexTreeBuilder:
for item in dir_path.iterdir():
if not item.is_dir():
continue
if item.name in self.IGNORE_DIRS or item.name.startswith("."):
if self._is_ignored_dir(item, source_root):
continue
if self._has_indexable_files_recursive(item, languages):
if self._has_indexable_files_recursive(item, languages, source_root=source_root):
return True
except PermissionError:
pass
@@ -520,6 +584,7 @@ class IndexTreeBuilder:
"static_graph_enabled": self.config.static_graph_enabled,
"static_graph_relationship_types": self.config.static_graph_relationship_types,
"use_astgrep": getattr(self.config, "use_astgrep", False),
"ignore_patterns": list(getattr(self.config, "ignore_patterns", [])),
}
worker_args = [
@@ -666,8 +731,7 @@ class IndexTreeBuilder:
d.name
for d in dir_path.iterdir()
if d.is_dir()
and d.name not in self.IGNORE_DIRS
and not d.name.startswith(".")
and not self._is_ignored_dir(d)
]
store.update_merkle_root()
@@ -963,6 +1027,19 @@ def _compute_graph_neighbors(
# === Worker Function for ProcessPoolExecutor ===
def _matches_ignore_patterns(path: Path, patterns: List[str]) -> bool:
name = path.name
if name.startswith('.'):
return True
for pattern in patterns:
normalized = str(pattern).strip().replace('\\', '/').rstrip('/')
if not normalized:
continue
if normalized == name or fnmatch.fnmatch(name, normalized):
return True
return False
def _build_dir_worker(args: tuple) -> DirBuildResult:
"""Worker function for parallel directory building.
@@ -986,6 +1063,7 @@ def _build_dir_worker(args: tuple) -> DirBuildResult:
static_graph_enabled=bool(config_dict.get("static_graph_enabled", False)),
static_graph_relationship_types=list(config_dict.get("static_graph_relationship_types", ["imports", "inherits"])),
use_astgrep=bool(config_dict.get("use_astgrep", False)),
ignore_patterns=list(config_dict.get("ignore_patterns", [])),
)
parser_factory = ParserFactory(config)
@@ -1064,21 +1142,11 @@ def _build_dir_worker(args: tuple) -> DirBuildResult:
_compute_graph_neighbors(store)
# Get subdirectories
ignore_dirs = {
".git",
".venv",
"venv",
"node_modules",
"__pycache__",
".codexlens",
".idea",
".vscode",
}
ignore_patterns = list(config_dict.get("ignore_patterns", [])) or list(DEFAULT_IGNORE_DIRS)
subdirs = [
d.name
for d in dir_path.iterdir()
if d.is_dir() and d.name not in ignore_dirs and not d.name.startswith(".")
if d.is_dir() and not _matches_ignore_patterns(d, ignore_patterns)
]
store.update_merkle_root()

View File

@@ -0,0 +1,24 @@
from __future__ import annotations
import json
from pathlib import Path
from codexlens.config import Config
def test_load_settings_reads_ignore_patterns_and_extension_filters(tmp_path: Path) -> None:
    """Config should pick up list-valued ignore_patterns and extension_filters from settings.json."""
    payload = {
        "ignore_patterns": ["frontend/dist", "coverage"],
        "extension_filters": ["*.min.js", "*.map"],
    }
    settings_path = tmp_path / "settings.json"
    settings_path.write_text(json.dumps(payload), encoding="utf-8")

    config = Config(data_dir=tmp_path)

    assert config.ignore_patterns == payload["ignore_patterns"]
    assert config.extension_filters == payload["extension_filters"]

View File

@@ -0,0 +1,86 @@
from __future__ import annotations
from pathlib import Path
from unittest.mock import MagicMock
from codexlens.config import Config
from codexlens.storage.index_tree import IndexTreeBuilder
def _relative_dirs(source_root: Path, dirs_by_depth: dict[int, list[Path]]) -> set[str]:
return {
path.relative_to(source_root).as_posix()
for paths in dirs_by_depth.values()
for path in paths
if path != source_root
}
def test_collect_dirs_by_depth_skips_common_build_artifact_dirs(tmp_path: Path) -> None:
    """Builder must prune well-known build-output dirs even when they contain source-like files."""
    src_dir = tmp_path / "src"
    src_dir.mkdir()
    (src_dir / "app.py").write_text("print('ok')\n", encoding="utf-8")

    artifact_names = ["dist", "build", "coverage", ".next", "out", ".turbo", ".parcel-cache", "target"]
    for name in artifact_names:
        artifact = tmp_path / name
        artifact.mkdir(parents=True, exist_ok=True)
        (artifact / "generated.py").write_text("print('artifact')\n", encoding="utf-8")

    builder = IndexTreeBuilder(
        registry=MagicMock(),
        mapper=MagicMock(),
        config=Config(data_dir=tmp_path / "data"),
        incremental=False,
    )

    discovered = _relative_dirs(tmp_path, builder._collect_dirs_by_depth(tmp_path))

    assert "src" in discovered
    # None of the artifact directories may survive pruning.
    assert not discovered & set(artifact_names)
def test_should_index_dir_ignores_transitive_build_only_subtrees(tmp_path: Path) -> None:
    """A package whose only files live under an ignored dist/ subtree is not indexable."""
    package_dir = tmp_path / "package"
    compiled = package_dir / "dist" / "bundle.py"
    compiled.parent.mkdir(parents=True)
    compiled.write_text("print('compiled')\n", encoding="utf-8")

    builder = IndexTreeBuilder(
        registry=MagicMock(),
        mapper=MagicMock(),
        config=Config(data_dir=tmp_path / "data"),
        incremental=False,
    )

    assert builder._should_index_dir(package_dir) is False
def test_collect_dirs_by_depth_respects_relative_ignore_patterns_from_config(tmp_path: Path) -> None:
    """A root-relative pattern like 'frontend/dist' prunes that subtree but keeps siblings."""
    frontend = tmp_path / "frontend"
    (frontend / "src").mkdir(parents=True)
    (frontend / "src" / "app.ts").write_text("export const app = 1\n", encoding="utf-8")
    (frontend / "dist").mkdir(parents=True)
    (frontend / "dist" / "bundle.ts").write_text("export const bundle = 1\n", encoding="utf-8")

    builder = IndexTreeBuilder(
        registry=MagicMock(),
        mapper=MagicMock(),
        config=Config(data_dir=tmp_path / "data", ignore_patterns=["frontend/dist"]),
        incremental=False,
    )

    discovered = _relative_dirs(tmp_path, builder._collect_dirs_by_depth(tmp_path))

    assert "frontend/src" in discovered
    assert "frontend/dist" not in discovered