feat: Phase 1+2 — project setup and core infrastructure
- requirements.txt, config.yaml, .env, Dockerfile, docker-compose.yml - app/core: config (YAML+env override), logging (JSON structured), exceptions (typed hierarchy), json_utils (Markdown fence stripping) - app/clients: LLMClient ABC + ZhipuAIClient (run_in_executor), StorageClient ABC + RustFSClient (boto3 head_object for size check) - app/main.py: FastAPI app with health endpoint and router registration - app/core/dependencies.py: lru_cache singleton factories - tests/conftest.py: mock_llm, mock_storage, test_app, client fixtures - pytest.ini: asyncio_mode=auto - 11 unit tests passing
This commit is contained in:
0
app/core/__init__.py
Normal file
0
app/core/__init__.py
Normal file
BIN
app/core/__pycache__/__init__.cpython-312.pyc
Normal file
BIN
app/core/__pycache__/__init__.cpython-312.pyc
Normal file
Binary file not shown.
BIN
app/core/__pycache__/config.cpython-312.pyc
Normal file
BIN
app/core/__pycache__/config.cpython-312.pyc
Normal file
Binary file not shown.
BIN
app/core/__pycache__/dependencies.cpython-312.pyc
Normal file
BIN
app/core/__pycache__/dependencies.cpython-312.pyc
Normal file
Binary file not shown.
BIN
app/core/__pycache__/exceptions.cpython-312.pyc
Normal file
BIN
app/core/__pycache__/exceptions.cpython-312.pyc
Normal file
Binary file not shown.
BIN
app/core/__pycache__/json_utils.cpython-312.pyc
Normal file
BIN
app/core/__pycache__/json_utils.cpython-312.pyc
Normal file
Binary file not shown.
BIN
app/core/__pycache__/logging.cpython-312.pyc
Normal file
BIN
app/core/__pycache__/logging.cpython-312.pyc
Normal file
Binary file not shown.
46
app/core/config.py
Normal file
46
app/core/config.py
Normal file
@@ -0,0 +1,46 @@
|
||||
import os
|
||||
from functools import lru_cache
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
|
||||
import yaml
|
||||
from dotenv import load_dotenv
|
||||
|
||||
# Load variables from a local .env file into os.environ before any lookups below.
load_dotenv()

# Maps environment variable names to nested YAML key paths
_ENV_OVERRIDES: dict[str, list[str]] = {
    "ZHIPUAI_API_KEY": ["zhipuai", "api_key"],
    "STORAGE_ACCESS_KEY": ["storage", "access_key"],
    "STORAGE_SECRET_KEY": ["storage", "secret_key"],
    "STORAGE_ENDPOINT": ["storage", "endpoint"],
    "BACKEND_CALLBACK_URL": ["backend", "callback_url"],
    "LOG_LEVEL": ["server", "log_level"],
    "MAX_VIDEO_SIZE_MB": ["video", "max_file_size_mb"],
}

# config.yaml is expected at the project root, three directories above this module.
_CONFIG_PATH = Path(__file__).parent.parent.parent / "config.yaml"
|
||||
|
||||
|
||||
def _set_nested(cfg: dict, keys: list[str], value: Any) -> None:
|
||||
for key in keys[:-1]:
|
||||
cfg = cfg.setdefault(key, {})
|
||||
# Coerce numeric env vars
|
||||
try:
|
||||
value = int(value)
|
||||
except (TypeError, ValueError):
|
||||
pass
|
||||
cfg[keys[-1]] = value
|
||||
|
||||
|
||||
@lru_cache(maxsize=1)
def get_config() -> dict:
    """Load config.yaml, apply environment-variable overrides, and cache the
    merged result for the lifetime of the process.

    Returns:
        The configuration dictionary with any `_ENV_OVERRIDES` values
        (present in the environment) written into their nested key paths.
    """
    with open(_CONFIG_PATH, "r", encoding="utf-8") as f:
        # safe_load returns None for an empty/comment-only file; fall back to
        # an empty dict so env overrides still apply instead of crashing.
        cfg: dict = yaml.safe_load(f) or {}

    for env_var, key_path in _ENV_OVERRIDES.items():
        value = os.environ.get(env_var)
        if value is not None:
            _set_nested(cfg, key_path, value)

    return cfg
|
||||
23
app/core/dependencies.py
Normal file
23
app/core/dependencies.py
Normal file
@@ -0,0 +1,23 @@
|
||||
from functools import lru_cache
|
||||
|
||||
from app.clients.llm.base import LLMClient
|
||||
from app.clients.llm.zhipuai_client import ZhipuAIClient
|
||||
from app.clients.storage.base import StorageClient
|
||||
from app.clients.storage.rustfs_client import RustFSClient
|
||||
from app.core.config import get_config
|
||||
|
||||
|
||||
@lru_cache(maxsize=1)
def get_llm_client() -> LLMClient:
    """Return the process-wide singleton ZhipuAI client, keyed from config."""
    api_key = get_config()["zhipuai"]["api_key"]
    return ZhipuAIClient(api_key=api_key)
|
||||
|
||||
|
||||
@lru_cache(maxsize=1)
def get_storage_client() -> StorageClient:
    """Return the process-wide singleton RustFS storage client, keyed from config."""
    storage_cfg = get_config()["storage"]
    return RustFSClient(
        endpoint=storage_cfg["endpoint"],
        access_key=storage_cfg["access_key"],
        secret_key=storage_cfg["secret_key"],
    )
|
||||
50
app/core/exceptions.py
Normal file
50
app/core/exceptions.py
Normal file
@@ -0,0 +1,50 @@
|
||||
from fastapi import Request
|
||||
from fastapi.responses import JSONResponse
|
||||
|
||||
|
||||
class AIServiceError(Exception):
    """Base class of the service's typed error hierarchy.

    Carries an HTTP status and a machine-readable error code that
    `ai_service_exception_handler` serializes into the JSON error payload.
    """

    # Generic-failure defaults; subclasses override both.
    status_code: int = 500
    code: str = "INTERNAL_ERROR"

    def __init__(self, message: str) -> None:
        super().__init__(message)
        self.message = message
|
||||
|
||||
|
||||
class UnsupportedFileTypeError(AIServiceError):
    """Client error: the submitted file type is not accepted (HTTP 400)."""

    status_code = 400
    code = "UNSUPPORTED_FILE_TYPE"


class VideoTooLargeError(AIServiceError):
    """Client error: the video exceeds the configured size limit (HTTP 400)."""

    status_code = 400
    code = "VIDEO_TOO_LARGE"


class StorageError(AIServiceError):
    """Upstream failure in the object-storage backend (HTTP 502)."""

    status_code = 502
    code = "STORAGE_ERROR"


class LLMParseError(AIServiceError):
    """The LLM responded, but its output could not be parsed (HTTP 502)."""

    status_code = 502
    code = "LLM_PARSE_ERROR"


class LLMCallError(AIServiceError):
    """The call to the LLM itself failed (HTTP 503)."""

    status_code = 503
    code = "LLM_CALL_ERROR"
|
||||
|
||||
|
||||
async def ai_service_exception_handler(request: Request, exc: AIServiceError) -> JSONResponse:
    """Translate a typed AIServiceError into its JSON error envelope."""
    body = {"code": exc.code, "message": exc.message}
    return JSONResponse(status_code=exc.status_code, content=body)
|
||||
|
||||
|
||||
async def unhandled_exception_handler(request: Request, exc: Exception) -> JSONResponse:
    """Catch-all handler: report any uncaught exception as a generic 500.

    NOTE(review): str(exc) is returned to the client verbatim, which can leak
    internal details — confirm this is acceptable for production.
    """
    body = {"code": "INTERNAL_ERROR", "message": str(exc)}
    return JSONResponse(status_code=500, content=body)
|
||||
19
app/core/json_utils.py
Normal file
19
app/core/json_utils.py
Normal file
@@ -0,0 +1,19 @@
|
||||
import json
import re
from typing import Any

from app.core.exceptions import LLMParseError
|
||||
|
||||
|
||||
def extract_json(text: str) -> Any:
    """Parse JSON from an LLM response, stripping Markdown code fences if present.

    Args:
        text: Raw model output, possibly wrapped in ```json ... ``` fences.

    Returns:
        The parsed JSON value (dict, list, str, number, bool, or None).

    Raises:
        LLMParseError: If the (unfenced) text is not valid JSON.
    """
    # Fix: the return annotation was the builtin `any` function, not typing.Any.
    text = text.strip()

    # Strip ```json ... ``` or ``` ... ``` fences; non-greedy, so only the
    # first fenced section is extracted.
    fence_match = re.search(r"```(?:json)?\s*([\s\S]+?)\s*```", text)
    if fence_match:
        text = fence_match.group(1).strip()

    try:
        return json.loads(text)
    except json.JSONDecodeError as e:
        raise LLMParseError(f"大模型返回非合法 JSON: {e}") from e
|
||||
62
app/core/logging.py
Normal file
62
app/core/logging.py
Normal file
@@ -0,0 +1,62 @@
|
||||
import json
|
||||
import logging
|
||||
import time
|
||||
from typing import Callable
|
||||
|
||||
from starlette.middleware.base import BaseHTTPMiddleware
|
||||
from starlette.requests import Request
|
||||
from starlette.responses import Response
|
||||
|
||||
|
||||
def get_logger(name: str) -> logging.Logger:
    """Return a logger named *name* that emits one JSON object per record.

    The JSON handler is attached at most once, and propagation is disabled so
    the root logger does not duplicate output.
    NOTE(review): no level is set here, so the effective level comes from the
    logging defaults unless configured elsewhere (e.g. via LOG_LEVEL) — verify.
    """
    log = logging.getLogger(name)
    if log.handlers:
        return log
    stream = logging.StreamHandler()
    stream.setFormatter(_JsonFormatter())
    log.addHandler(stream)
    log.propagate = False
    return log
|
||||
|
||||
|
||||
class _JsonFormatter(logging.Formatter):
|
||||
def format(self, record: logging.LogRecord) -> str:
|
||||
payload = {
|
||||
"time": self.formatTime(record, datefmt="%Y-%m-%dT%H:%M:%S"),
|
||||
"level": record.levelname,
|
||||
"logger": record.name,
|
||||
"message": record.getMessage(),
|
||||
}
|
||||
if record.exc_info:
|
||||
payload["exc_info"] = self.formatException(record.exc_info)
|
||||
# Merge any extra fields passed via `extra=`
|
||||
for key, value in record.__dict__.items():
|
||||
if key not in (
|
||||
"name", "msg", "args", "levelname", "levelno", "pathname",
|
||||
"filename", "module", "exc_info", "exc_text", "stack_info",
|
||||
"lineno", "funcName", "created", "msecs", "relativeCreated",
|
||||
"thread", "threadName", "processName", "process", "message",
|
||||
"taskName",
|
||||
):
|
||||
payload[key] = value
|
||||
return json.dumps(payload, ensure_ascii=False)
|
||||
|
||||
|
||||
class RequestLoggingMiddleware(BaseHTTPMiddleware):
    """Log one structured line per HTTP request: method, path, status,
    and wall-clock duration in milliseconds."""

    def __init__(self, app, logger: logging.Logger | None = None) -> None:
        super().__init__(app)
        # Default to the shared JSON "request" logger when none is injected.
        self._logger = logger or get_logger("request")

    async def dispatch(self, request: Request, call_next: Callable) -> Response:
        t0 = time.perf_counter()
        response = await call_next(request)
        elapsed_ms = round((time.perf_counter() - t0) * 1000, 1)
        fields = {
            "method": request.method,
            "path": request.url.path,
            "status": response.status_code,
            "duration_ms": elapsed_ms,
        }
        self._logger.info("request", extra=fields)
        return response
|
||||
Reference in New Issue
Block a user