feat: Phase 1+2 — project setup and core infrastructure
- requirements.txt, config.yaml, .env, Dockerfile, docker-compose.yml
- app/core: config (YAML + env override), logging (JSON structured), exceptions (typed hierarchy), json_utils (Markdown fence stripping)
- app/clients: LLMClient ABC + ZhipuAIClient (run_in_executor), StorageClient ABC + RustFSClient (boto3 head_object for size check)
- app/main.py: FastAPI app with health endpoint and router registration
- app/core/dependencies.py: lru_cache singleton factories
- tests/conftest.py: mock_llm, mock_storage, test_app, client fixtures
- pytest.ini: asyncio_mode=auto
- 11 unit tests passing
This commit is contained in:
0
app/clients/llm/__init__.py
Normal file
0
app/clients/llm/__init__.py
Normal file
BIN
app/clients/llm/__pycache__/__init__.cpython-312.pyc
Normal file
BIN
app/clients/llm/__pycache__/__init__.cpython-312.pyc
Normal file
Binary file not shown.
BIN
app/clients/llm/__pycache__/base.cpython-312.pyc
Normal file
BIN
app/clients/llm/__pycache__/base.cpython-312.pyc
Normal file
Binary file not shown.
BIN
app/clients/llm/__pycache__/zhipuai_client.cpython-312.pyc
Normal file
BIN
app/clients/llm/__pycache__/zhipuai_client.cpython-312.pyc
Normal file
Binary file not shown.
11
app/clients/llm/base.py
Normal file
11
app/clients/llm/base.py
Normal file
@@ -0,0 +1,11 @@
|
||||
from abc import ABC, abstractmethod
|
||||
|
||||
|
||||
class LLMClient(ABC):
    """Abstract interface for chat-based large-language-model backends.

    Concrete implementations (e.g. a vendor SDK wrapper) must provide both
    a plain-text and a multimodal chat entry point, each returning the raw
    response content as a string.
    """

    @abstractmethod
    async def chat(self, model: str, messages: list[dict]) -> str:
        """Send a text-only chat request; return the response content string."""
        ...

    @abstractmethod
    async def chat_vision(self, model: str, messages: list[dict]) -> str:
        """Send a multimodal (vision) chat request; return the response content string."""
        ...
37
app/clients/llm/zhipuai_client.py
Normal file
37
app/clients/llm/zhipuai_client.py
Normal file
@@ -0,0 +1,37 @@
|
||||
import asyncio
|
||||
|
||||
from zhipuai import ZhipuAI
|
||||
|
||||
from app.clients.llm.base import LLMClient
|
||||
from app.core.exceptions import LLMCallError
|
||||
from app.core.logging import get_logger
|
||||
|
||||
logger = get_logger(__name__)
|
||||
|
||||
|
||||
class ZhipuAIClient(LLMClient):
    """LLM client backed by the synchronous ZhipuAI SDK.

    The SDK blocks on network I/O, so every call is offloaded to the
    default executor thread pool to keep the event loop responsive.
    """

    def __init__(self, api_key: str) -> None:
        self._client = ZhipuAI(api_key=api_key)

    async def chat(self, model: str, messages: list[dict]) -> str:
        """Send a text chat request and return the response content string."""
        return await self._call(model, messages)

    async def chat_vision(self, model: str, messages: list[dict]) -> str:
        """Send a multimodal (vision) chat request and return the response content string."""
        return await self._call(model, messages)

    async def _call(self, model: str, messages: list[dict]) -> str:
        """Run the blocking SDK call in a worker thread and return its content.

        Raises:
            LLMCallError: wraps any SDK/network failure, chained from the cause.
        """
        # get_running_loop() is the correct API inside a coroutine;
        # get_event_loop() is deprecated here (3.10+) and can create a
        # stray loop when invoked off the main thread.
        loop = asyncio.get_running_loop()
        try:
            response = await loop.run_in_executor(
                None,
                lambda: self._client.chat.completions.create(
                    model=model,
                    messages=messages,
                ),
            )
            content = response.choices[0].message.content
            # content may be None (e.g. tool-call-only responses) — guard
            # len() so logging cannot raise and be mislabeled as an LLM error.
            logger.info(
                "llm_call",
                extra={
                    "model": model,
                    "response_len": len(content) if content is not None else 0,
                },
            )
            return content
        except Exception as exc:
            logger.error("llm_call_error", extra={"model": model, "error": str(exc)})
            raise LLMCallError(f"大模型调用失败: {exc}") from exc
Reference in New Issue
Block a user