mirror of
https://github.com/catlog22/Claude-Code-Workflow.git
synced 2026-02-12 02:37:45 +08:00
feat: Add unified LiteLLM API management with dashboard UI and CLI integration
- Create ccw-litellm Python package with AbstractEmbedder and AbstractLLMClient interfaces - Add BaseEmbedder abstraction and factory pattern to codex-lens for pluggable backends - Implement API Settings dashboard page for provider credentials and custom endpoints - Add REST API routes for CRUD operations on providers and endpoints - Extend CLI with --model parameter for custom endpoint routing - Integrate existing context-cache for @pattern file resolution - Add provider model registry with predefined models per provider type - Include i18n translations (en/zh) for all new UI elements 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
11
ccw-litellm/tests/conftest.py
Normal file
11
ccw-litellm/tests/conftest.py
Normal file
@@ -0,0 +1,11 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
|
||||
def pytest_configure() -> None:
    """Make the package importable by prepending ``<project_root>/src`` to ``sys.path``.

    Runs once at pytest startup so the tests can import the package from the
    source tree without an editable install.
    """
    # tests/ lives one level below the project root, hence parents[1].
    src_dir = Path(__file__).resolve().parents[1] / "src"
    sys.path.insert(0, str(src_dir))
|
||||
|
||||
64
ccw-litellm/tests/test_interfaces.py
Normal file
64
ccw-litellm/tests/test_interfaces.py
Normal file
@@ -0,0 +1,64 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
from typing import Any, Sequence
|
||||
|
||||
import numpy as np
|
||||
|
||||
from ccw_litellm.interfaces import AbstractEmbedder, AbstractLLMClient, ChatMessage, LLMResponse
|
||||
|
||||
|
||||
class _DummyEmbedder(AbstractEmbedder):
    """Minimal embedder stub: every input maps to an all-zero float32 vector."""

    @property
    def dimensions(self) -> int:
        # Fixed, tiny dimensionality keeps the test arrays small.
        return 3

    def embed(
        self,
        texts: str | Sequence[str],
        *,
        batch_size: int | None = None,
        **kwargs: Any,
    ) -> np.ndarray:
        """Return a ``(len(texts), dimensions)`` zero matrix of float32.

        A bare string is treated as a one-element batch, mirroring the
        interface contract exercised by the tests.
        """
        del batch_size, kwargs  # accepted for interface parity; unused here
        batch = [texts] if isinstance(texts, str) else texts
        return np.zeros((len(batch), self.dimensions), dtype=np.float32)
|
||||
|
||||
|
||||
class _DummyLLM(AbstractLLMClient):
    """Echo-style LLM stub used to exercise the abstract client wrappers."""

    def chat(self, messages: Sequence[ChatMessage], **kwargs: Any) -> LLMResponse:
        """Concatenate all message contents into a single response."""
        del kwargs  # interface parity only
        joined = "".join(msg.content for msg in messages)
        return LLMResponse(content=joined)

    def complete(self, prompt: str, **kwargs: Any) -> LLMResponse:
        """Echo the prompt back unchanged as the completion."""
        del kwargs  # interface parity only
        return LLMResponse(content=prompt)
|
||||
|
||||
|
||||
def test_embed_sync_shape_and_dtype() -> None:
    """A two-text batch yields a (2, dims) float32 matrix."""
    result = _DummyEmbedder().embed(["a", "b"])
    assert result.shape == (2, 3)
    assert result.dtype == np.float32
|
||||
|
||||
|
||||
def test_embed_async_wrapper() -> None:
    """The async ``aembed`` wrapper accepts a bare string and returns a 1-row batch."""
    vectors = asyncio.run(_DummyEmbedder().aembed("x"))
    assert vectors.shape == (1, 3)
|
||||
|
||||
|
||||
def test_llm_sync() -> None:
    """``chat`` folds message contents into an equal ``LLMResponse``."""
    response = _DummyLLM().chat([ChatMessage(role="user", content="hi")])
    assert response == LLMResponse(content="hi")
|
||||
|
||||
|
||||
def test_llm_async_wrappers() -> None:
    """``achat`` and ``acomplete`` delegate to their sync counterparts."""
    client = _DummyLLM()
    chat_out = asyncio.run(client.achat([ChatMessage(role="user", content="a")]))
    complete_out = asyncio.run(client.acomplete("b"))
    assert chat_out.content == "a"
    assert complete_out.content == "b"
|
||||
Reference in New Issue
Block a user