mirror of
https://github.com/catlog22/Claude-Code-Workflow.git
synced 2026-02-13 02:41:50 +08:00
feat: Add unified LiteLLM API management with dashboard UI and CLI integration

- Create ccw-litellm Python package with AbstractEmbedder and AbstractLLMClient interfaces
- Add BaseEmbedder abstraction and factory pattern to codex-lens for pluggable backends
- Implement API Settings dashboard page for provider credentials and custom endpoints
- Add REST API routes for CRUD operations on providers and endpoints
- Extend CLI with --model parameter for custom endpoint routing
- Integrate existing context-cache for @pattern file resolution
- Add provider model registry with predefined models per provider type
- Include i18n translations (en/zh) for all new UI elements

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
45
ccw-litellm/src/ccw_litellm/interfaces/llm.py
Normal file
45
ccw-litellm/src/ccw_litellm/interfaces/llm.py
Normal file
@@ -0,0 +1,45 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
from abc import ABC, abstractmethod
|
||||
from dataclasses import dataclass
|
||||
from typing import Any, Literal, Sequence
|
||||
|
||||
|
||||
@dataclass(frozen=True, slots=True)
|
||||
class ChatMessage:
|
||||
role: Literal["system", "user", "assistant", "tool"]
|
||||
content: str
|
||||
|
||||
|
||||
@dataclass(frozen=True, slots=True)
|
||||
class LLMResponse:
|
||||
content: str
|
||||
raw: Any | None = None
|
||||
|
||||
|
||||
class AbstractLLMClient(ABC):
    """Interface for LiteLLM-style chat/completion backends.

    Concrete backends implement only the two synchronous methods. The
    async variants come for free: they delegate to a worker thread via
    ``asyncio.to_thread``, so subclasses override them only when a
    natively asynchronous transport is available.
    """

    @abstractmethod
    def chat(self, messages: Sequence[ChatMessage], **kwargs: Any) -> LLMResponse:
        """Run a chat completion over an ordered sequence of messages."""

    @abstractmethod
    def complete(self, prompt: str, **kwargs: Any) -> LLMResponse:
        """Run a plain text completion for a single prompt string."""

    async def achat(self, messages: Sequence[ChatMessage], **kwargs: Any) -> LLMResponse:
        """Non-blocking variant of `chat`; executes it on a worker thread."""
        # Bind the arguments into a zero-arg callable so the worker thread
        # invokes the synchronous implementation unchanged.
        call = lambda: self.chat(messages, **kwargs)
        return await asyncio.to_thread(call)

    async def acomplete(self, prompt: str, **kwargs: Any) -> LLMResponse:
        """Non-blocking variant of `complete`; executes it on a worker thread."""
        call = lambda: self.complete(prompt, **kwargs)
        return await asyncio.to_thread(call)
|
||||
|
||||
Reference in New Issue
Block a user