Files
Claude-Code-Workflow/codex-lens/pyproject.toml
catlog22 cf5fecd66d fix(codex-lens): resolve installation issues from frontend
- Add missing README.md file required by setuptools
- Fix deprecated license format in pyproject.toml (use SPDX string instead of TOML table)
- Add MIT LICENSE file for proper packaging
- Verified successful local installation and import

Fixes permission denied error during npm-based installation on macOS
2026-01-24 14:43:39 +08:00
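For context on the license fix noted above: recent setuptools releases deprecate the PEP 621 TOML-table form of the license field in favor of a plain SPDX expression string (PEP 639). A minimal before/after sketch (the previous value is not visible in this view, so the table form below is illustrative):

# Deprecated table form (triggers a setuptools deprecation warning)
license = { text = "MIT" }

# SPDX expression string, as now used in this pyproject.toml
license = "MIT"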


[build-system]
requires = ["setuptools>=61.0"]
build-backend = "setuptools.build_meta"

[project]
name = "codex-lens"
version = "0.1.0"
description = "CodexLens multi-modal code analysis platform"
readme = "README.md"
requires-python = ">=3.10"
license = "MIT"
authors = [
    { name = "CodexLens contributors" },
]
dependencies = [
    "typer>=0.9",
    "rich>=13",
    "pydantic>=2.0",
    "tree-sitter>=0.20",
    "tree-sitter-python>=0.25",
    "tree-sitter-javascript>=0.25",
    "tree-sitter-typescript>=0.23",
    "pathspec>=0.11",
    "watchdog>=3.0",
]

[project.optional-dependencies]
# Semantic search using fastembed (ONNX-based, lightweight ~200MB)
semantic = [
    "numpy>=1.24",
    "fastembed>=0.2",
    "hnswlib>=0.8.0",
]

# GPU acceleration for semantic search (NVIDIA CUDA)
# Install with: pip install codex-lens[semantic-gpu]
semantic-gpu = [
    "numpy>=1.24",
    "fastembed>=0.2",
    "hnswlib>=0.8.0",
    "onnxruntime-gpu>=1.15.0",  # CUDA support
]

# GPU acceleration for Windows (DirectML - supports NVIDIA/AMD/Intel)
# Install with: pip install codex-lens[semantic-directml]
semantic-directml = [
    "numpy>=1.24",
    "fastembed>=0.2",
    "hnswlib>=0.8.0",
    "onnxruntime-directml>=1.15.0",  # DirectML support
]

# Cross-encoder reranking (second-stage, optional)
# Install with: pip install codex-lens[reranker] (default: ONNX backend)
reranker-onnx = [
    "optimum>=1.16",
    "onnxruntime>=1.15",
    "transformers>=4.36",
]

# Remote reranking via HTTP API
reranker-api = [
    "httpx>=0.25",
]

# LLM-based reranking via ccw-litellm
reranker-litellm = [
    "ccw-litellm>=0.1",
]

# Legacy sentence-transformers CrossEncoder reranker
reranker-legacy = [
    "sentence-transformers>=2.2",
]

# Backward-compatible alias for default reranker backend
reranker = [
    "optimum>=1.16",
    "onnxruntime>=1.15",
    "transformers>=4.36",
]

# SPLADE sparse retrieval
splade = [
    "transformers>=4.36",
    "optimum[onnxruntime]>=1.16",
]

# SPLADE with GPU acceleration (CUDA)
splade-gpu = [
    "transformers>=4.36",
    "optimum[onnxruntime-gpu]>=1.16",
]

# Encoding detection for non-UTF8 files
encoding = [
    "chardet>=5.0",
]

# Clustering for staged hybrid search (HDBSCAN + sklearn)
clustering = [
    "hdbscan>=0.8.1",
    "scikit-learn>=1.3.0",
]

# Full features including tiktoken for accurate token counting
full = [
    "tiktoken>=0.5.0",
]

# Language Server Protocol support
lsp = [
    "pygls>=1.3.0",
]
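
# The extras above are independent and can be combined in a single install,
# e.g. (illustrative command; assumes the distribution is published under the
# "codex-lens" name declared in [project]):
#   pip install "codex-lens[semantic,reranker,lsp]"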

[project.scripts]
codexlens-lsp = "codexlens.lsp:main"

[project.urls]
Homepage = "https://github.com/openai/codex-lens"

[tool.setuptools]
package-dir = { "" = "src" }
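
As a usage sketch of the extras defined above, a downstream project could depend on codex-lens plus optional features from its own pyproject.toml. The consumer package name and version pin below are hypothetical; the extras names come from [project.optional-dependencies] in this file:

[project]
name = "my-downstream-tool"    # hypothetical consumer package
version = "0.0.1"
dependencies = [
    # core codex-lens plus semantic search and the default ONNX reranker
    "codex-lens[semantic,reranker]>=0.1.0",
]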