Mirror of https://github.com/catlog22/Claude-Code-Workflow.git (synced 2026-02-09 02:24:11 +08:00)
Remove the SPLADE sparse neural search backend and the hybrid_cascade strategy,
simplifying the search architecture from 6 backends to 4 (FTS Exact/Fuzzy, Binary Vector, Dense Vector, LSP).
Main changes:
- Remove 4 files, including splade_encoder.py, splade_index.py, and migration_009
- Remove SPLADE-related settings (enable_splade, splade_model, etc.) from config.py
- Change DEFAULT_WEIGHTS to FTS weights {exact: 0.25, fuzzy: 0.1, vector: 0.5, lsp: 0.15}
- Remove hybrid_cascade_search(); all cascade fallbacks now go through self.search()
- Map the API's fusion_strategy='hybrid' to binary_rerank for backward compatibility (see the sketch after this list)
- Remove the index_splade/splade_status CLI commands and --method splade
- Update tests, benchmarks, and documentation
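The weight change and the backward-compatibility mapping can be pictured roughly as below. This is a minimal sketch based only on the commit message: the DEFAULT_WEIGHTS values and the 'hybrid' -> 'binary_rerank' mapping are stated above, but resolve_fusion_strategy and SUPPORTED_STRATEGIES are hypothetical names, not the actual CodexLens code.

```python
# Illustrative sketch only: values come from the commit message; the function
# and the SUPPORTED_STRATEGIES set are assumptions, not CodexLens internals.

# New default fusion weights after removing SPLADE (FTS exact/fuzzy, dense vector, LSP).
DEFAULT_WEIGHTS = {"exact": 0.25, "fuzzy": 0.1, "vector": 0.5, "lsp": 0.15}

# Strategies assumed to remain after dropping hybrid_cascade (hypothetical set).
SUPPORTED_STRATEGIES = {"binary_rerank", "dense", "fts"}


def resolve_fusion_strategy(requested: str) -> str:
    """Map legacy strategy names onto the strategies that still exist."""
    if requested == "hybrid":
        # Backward compatibility: callers that still pass 'hybrid' are routed
        # to binary_rerank instead of the removed cascade strategy.
        return "binary_rerank"
    if requested not in SUPPORTED_STRATEGIES:
        raise ValueError(f"unknown fusion_strategy: {requested!r}")
    return requested
```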
112 lines
2.4 KiB
TOML
[build-system]
requires = ["setuptools>=61.0"]
build-backend = "setuptools.build_meta"

[project]
name = "codex-lens"
version = "0.1.0"
description = "CodexLens multi-modal code analysis platform"
readme = "README.md"
requires-python = ">=3.10"
license = "MIT"
authors = [
    { name = "CodexLens contributors" }
]
dependencies = [
    "typer>=0.9",
    "rich>=13",
    "pydantic>=2.0",
    "tree-sitter>=0.20",
    "tree-sitter-python>=0.25",
    "tree-sitter-javascript>=0.25",
    "tree-sitter-typescript>=0.23",
    "pathspec>=0.11",
    "watchdog>=3.0",
]

[project.optional-dependencies]
# Semantic search using fastembed (ONNX-based, lightweight ~200MB)
semantic = [
    "numpy>=1.24",
    "fastembed>=0.2",
    "hnswlib>=0.8.0",
]

# GPU acceleration for semantic search (NVIDIA CUDA)
# Install with: pip install codexlens[semantic-gpu]
semantic-gpu = [
    "numpy>=1.24",
    "fastembed>=0.2",
    "hnswlib>=0.8.0",
    "onnxruntime-gpu>=1.15.0",  # CUDA support
]

# GPU acceleration for Windows (DirectML - supports NVIDIA/AMD/Intel)
# Install with: pip install codexlens[semantic-directml]
semantic-directml = [
    "numpy>=1.24",
    "fastembed>=0.2",
    "hnswlib>=0.8.0",
    "onnxruntime-directml>=1.15.0",  # DirectML support
]

# Cross-encoder reranking (second-stage, optional)
# Install with: pip install codexlens[reranker] (default: ONNX backend)
reranker-onnx = [
    "optimum>=1.16",
    "onnxruntime>=1.15",
    "transformers>=4.36",
]

# Remote reranking via HTTP API
reranker-api = [
    "httpx>=0.25",
]

# LLM-based reranking via ccw-litellm
reranker-litellm = [
    "ccw-litellm>=0.1",
]

# Legacy sentence-transformers CrossEncoder reranker
reranker-legacy = [
    "sentence-transformers>=2.2",
]

# Backward-compatible alias for default reranker backend
reranker = [
    "optimum>=1.16",
    "onnxruntime>=1.15",
    "transformers>=4.36",
]

# Encoding detection for non-UTF8 files
encoding = [
    "chardet>=5.0",
]

# Clustering for staged hybrid search (HDBSCAN + sklearn)
clustering = [
    "hdbscan>=0.8.1",
    "scikit-learn>=1.3.0",
]

# Full features including tiktoken for accurate token counting
full = [
    "tiktoken>=0.5.0",
]

# Language Server Protocol support
lsp = [
    "pygls>=1.3.0",
]

[project.scripts]
codexlens-lsp = "codexlens.lsp:main"

[project.urls]
Homepage = "https://github.com/openai/codex-lens"

[tool.setuptools]
package-dir = { "" = "src" }
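A quick way to see which of the optional extras above are available in a given environment is to probe for one representative package from each group. The snippet below is a minimal sketch, not part of CodexLens; the marker packages are taken from the optional-dependencies table, and the import name for ccw-litellm is an assumption.

```python
# Minimal sketch (not CodexLens code): check which optional extras appear to be
# installed by looking for one representative package from each dependency group.
from importlib.util import find_spec

# Marker package per extra, taken from [project.optional-dependencies] above.
EXTRAS = {
    "semantic": "fastembed",
    "reranker-onnx": "optimum",
    "reranker-api": "httpx",
    "reranker-litellm": "ccw_litellm",          # assumed import name for ccw-litellm
    "reranker-legacy": "sentence_transformers",
    "encoding": "chardet",
    "clustering": "hdbscan",
    "full": "tiktoken",
    "lsp": "pygls",
}

for extra, module in EXTRAS.items():
    status = "installed" if find_spec(module) is not None else "missing"
    print(f"{extra:20s} {status}")
```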