Claude-Code-Workflow/ccw-litellm/tests/test_interfaces.py
catlog22 · bf66b095c7 · feat: Add unified LiteLLM API management with dashboard UI and CLI integration
- Create ccw-litellm Python package with AbstractEmbedder and AbstractLLMClient interfaces
- Add BaseEmbedder abstraction and factory pattern to codex-lens for pluggable backends (a hedged sketch follows below)
- Implement API Settings dashboard page for provider credentials and custom endpoints
- Add REST API routes for CRUD operations on providers and endpoints
- Extend CLI with --model parameter for custom endpoint routing
- Integrate existing context-cache for @pattern file resolution
- Add provider model registry with predefined models per provider type
- Include i18n translations (en/zh) for all new UI elements

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2025-12-23 20:36:32 +08:00

65 lines · 1.7 KiB · Python
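The factory pattern for pluggable embedder backends (second bullet above) is not part of this test file. Below is a minimal, hypothetical sketch of a registry-plus-factory, reusing the AbstractEmbedder interface that the tests import; the names register_embedder, create_embedder, _EMBEDDER_REGISTRY, and EmbedderFactory are illustrative assumptions, not the actual codex-lens API.

# Hypothetical sketch of a pluggable-backend factory; the names here are
# illustrative, not the actual codex-lens API.
from typing import Any, Callable, Dict

from ccw_litellm.interfaces import AbstractEmbedder

EmbedderFactory = Callable[..., AbstractEmbedder]

_EMBEDDER_REGISTRY: Dict[str, EmbedderFactory] = {}


def register_embedder(name: str) -> Callable[[EmbedderFactory], EmbedderFactory]:
    """Decorator that registers an embedder factory under a backend name."""
    def decorator(factory: EmbedderFactory) -> EmbedderFactory:
        _EMBEDDER_REGISTRY[name] = factory
        return factory
    return decorator


def create_embedder(backend: str, **kwargs: Any) -> AbstractEmbedder:
    """Instantiate the embedder registered under `backend`."""
    try:
        factory = _EMBEDDER_REGISTRY[backend]
    except KeyError:
        raise ValueError(f"unknown embedder backend: {backend!r}") from None
    return factory(**kwargs)

Under this sketch, a backend opts in with @register_embedder("litellm") above its factory, and callers obtain instances via create_embedder("litellm", ...); the string key is what a config file or CLI flag would select.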

from __future__ import annotations

import asyncio
from typing import Any, Sequence

import numpy as np

from ccw_litellm.interfaces import AbstractEmbedder, AbstractLLMClient, ChatMessage, LLMResponse


class _DummyEmbedder(AbstractEmbedder):
    """Minimal concrete embedder used to exercise the abstract interface."""

    @property
    def dimensions(self) -> int:
        return 3

    def embed(
        self,
        texts: str | Sequence[str],
        *,
        batch_size: int | None = None,
        **kwargs: Any,
    ) -> np.ndarray:
        # A lone string is treated as a batch of one.
        if isinstance(texts, str):
            texts = [texts]
        _ = batch_size
        _ = kwargs
        return np.zeros((len(texts), self.dimensions), dtype=np.float32)


class _DummyLLM(AbstractLLMClient):
    """Echoing stub: chat concatenates message contents, complete echoes the prompt."""

    def chat(self, messages: Sequence[ChatMessage], **kwargs: Any) -> LLMResponse:
        _ = kwargs
        return LLMResponse(content="".join(m.content for m in messages))

    def complete(self, prompt: str, **kwargs: Any) -> LLMResponse:
        _ = kwargs
        return LLMResponse(content=prompt)


def test_embed_sync_shape_and_dtype() -> None:
    emb = _DummyEmbedder()
    out = emb.embed(["a", "b"])
    assert out.shape == (2, 3)
    assert out.dtype == np.float32


def test_embed_async_wrapper() -> None:
    # aembed is the async wrapper the base class provides around embed().
    emb = _DummyEmbedder()
    out = asyncio.run(emb.aembed("x"))
    assert out.shape == (1, 3)


def test_llm_sync() -> None:
    llm = _DummyLLM()
    out = llm.chat([ChatMessage(role="user", content="hi")])
    assert out == LLMResponse(content="hi")


def test_llm_async_wrappers() -> None:
    # achat/acomplete are the async wrappers around chat()/complete().
    llm = _DummyLLM()
    out1 = asyncio.run(llm.achat([ChatMessage(role="user", content="a")]))
    out2 = asyncio.run(llm.acomplete("b"))
    assert out1.content == "a"
    assert out2.content == "b"
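
For orientation, here is a plausible shape of ccw_litellm.interfaces inferred from these tests: dataclass message/response types, abstract sync methods, and default async wrappers (aembed, achat, acomplete) that delegate to the sync implementations via asyncio.to_thread. This is a sketch of what the tests imply, not the actual module; for instance, the real embed signature may spell out the keyword-only batch_size parameter that the dummy narrows from **kwargs.

# Plausible shape of ccw_litellm.interfaces inferred from the tests above;
# a sketch, not the actual module.
from __future__ import annotations

import asyncio
from abc import ABC, abstractmethod
from dataclasses import dataclass
from typing import Any, Sequence

import numpy as np


@dataclass
class ChatMessage:
    role: str
    content: str


@dataclass
class LLMResponse:
    # Dataclass equality is what lets test_llm_sync compare whole responses.
    content: str


class AbstractEmbedder(ABC):
    @property
    @abstractmethod
    def dimensions(self) -> int:
        """Width of the returned embedding vectors."""

    @abstractmethod
    def embed(self, texts: str | Sequence[str], **kwargs: Any) -> np.ndarray:
        """Return a (batch, dimensions) float32 array."""

    async def aembed(self, texts: str | Sequence[str], **kwargs: Any) -> np.ndarray:
        # Default async wrapper: run the sync implementation in a worker thread.
        return await asyncio.to_thread(self.embed, texts, **kwargs)


class AbstractLLMClient(ABC):
    @abstractmethod
    def chat(self, messages: Sequence[ChatMessage], **kwargs: Any) -> LLMResponse: ...

    @abstractmethod
    def complete(self, prompt: str, **kwargs: Any) -> LLMResponse: ...

    async def achat(self, messages: Sequence[ChatMessage], **kwargs: Any) -> LLMResponse:
        return await asyncio.to_thread(self.chat, messages, **kwargs)

    async def acomplete(self, prompt: str, **kwargs: Any) -> LLMResponse:
        return await asyncio.to_thread(self.complete, prompt, **kwargs)

The suite itself needs no fixtures and runs directly with pytest tests/test_interfaces.py.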