Files
Claude-Code-Workflow/codex-lens/pyproject.toml
catlog22 f14418603a feat(cli): 添加 --rule 选项支持模板自动发现
重构 ccw cli 模板系统:

- 新增 template-discovery.ts 模块,支持扁平化模板自动发现
- 添加 --rule <template> 选项,自动加载 protocol 和 template
- 模板目录从嵌套结构 (prompts/category/file.txt) 迁移到扁平结构 (prompts/category-function.txt)
- 更新所有 agent/command 文件,使用 $PROTO $TMPL 环境变量替代 $(cat ...) 模式
- 支持模糊匹配:--rule 02-review-architecture 可匹配 analysis-review-architecture.txt

其他更新:
- Dashboard: 添加 Claude Manager 和 Issue Manager 页面
- Codex-lens: 增强 chain_search 和 clustering 模块

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2026-01-17 19:20:24 +08:00

124 lines
2.6 KiB
TOML

[build-system]
requires = ["setuptools>=61.0"]
build-backend = "setuptools.build_meta"

[project]
name = "codex-lens"
version = "0.1.0"
description = "CodexLens multi-modal code analysis platform"
readme = "README.md"
requires-python = ">=3.10"
license = { text = "MIT" }
authors = [
    { name = "CodexLens contributors" },
]
# Core runtime dependencies (PEP 508 specifiers), sorted alphabetically.
dependencies = [
    "pathspec>=0.11",
    "pydantic>=2.0",
    "rich>=13",
    "tree-sitter>=0.20",
    "tree-sitter-javascript>=0.25",
    "tree-sitter-python>=0.25",
    "tree-sitter-typescript>=0.23",
    "typer>=0.9",
    "watchdog>=3.0",
]

[project.optional-dependencies]
# Semantic search using fastembed (ONNX-based, lightweight ~200MB)
semantic = [
    "fastembed>=0.2",
    "hnswlib>=0.8.0",
    "numpy>=1.24",
]
# GPU acceleration for semantic search (NVIDIA CUDA)
# Install with: pip install codex-lens[semantic-gpu]
semantic-gpu = [
    "fastembed>=0.2",
    "hnswlib>=0.8.0",
    "numpy>=1.24",
    "onnxruntime-gpu>=1.15.0",  # CUDA support
]
# GPU acceleration for Windows (DirectML - supports NVIDIA/AMD/Intel)
# Install with: pip install codex-lens[semantic-directml]
semantic-directml = [
    "fastembed>=0.2",
    "hnswlib>=0.8.0",
    "numpy>=1.24",
    "onnxruntime-directml>=1.15.0",  # DirectML support
]
# Cross-encoder reranking (second-stage, optional)
# Install with: pip install codex-lens[reranker] (default: ONNX backend)
reranker-onnx = [
    "onnxruntime>=1.15",
    "optimum>=1.16",
    "transformers>=4.36",
]
# Remote reranking via HTTP API
reranker-api = [
    "httpx>=0.25",
]
# LLM-based reranking via ccw-litellm
reranker-litellm = [
    "ccw-litellm>=0.1",
]
# Legacy sentence-transformers CrossEncoder reranker
reranker-legacy = [
    "sentence-transformers>=2.2",
]
# Backward-compatible alias for the default reranker backend.
# NOTE: keep this list in sync with reranker-onnx above.
reranker = [
    "onnxruntime>=1.15",
    "optimum>=1.16",
    "transformers>=4.36",
]
# SPLADE sparse retrieval
splade = [
    "optimum[onnxruntime]>=1.16",
    "transformers>=4.36",
]
# SPLADE with GPU acceleration (CUDA)
splade-gpu = [
    "optimum[onnxruntime-gpu]>=1.16",
    "transformers>=4.36",
]
# Encoding detection for non-UTF8 files
encoding = [
    "chardet>=5.0",
]
# Clustering for staged hybrid search (HDBSCAN + sklearn)
clustering = [
    "hdbscan>=0.8.1",
    "scikit-learn>=1.3.0",
]
# Full features including tiktoken for accurate token counting
full = [
    "tiktoken>=0.5.0",
]
# Language Server Protocol support
lsp = [
    "pygls>=1.3.0",
]

[project.scripts]
codexlens-lsp = "codexlens.lsp:main"

[project.urls]
Homepage = "https://github.com/openai/codex-lens"

[tool.setuptools]
# src layout: packages live under src/ (setuptools auto-discovery picks them up).
package-dir = { "" = "src" }