- requirements.txt, config.yaml, .env, Dockerfile, docker-compose.yml - app/core: config (YAML+env override), logging (JSON structured), exceptions (typed hierarchy), json_utils (Markdown fence stripping) - app/clients: LLMClient ABC + ZhipuAIClient (run_in_executor), StorageClient ABC + RustFSClient (boto3 head_object for size check) - app/main.py: FastAPI app with health endpoint and router registration - app/core/dependencies.py: lru_cache singleton factories - tests/conftest.py: mock_llm, mock_storage, test_app, client fixtures - pytest.ini: asyncio_mode=auto - 11 unit tests passing
51 lines
1.2 KiB
Python
51 lines
1.2 KiB
Python
import logging

from fastapi import Request
from fastapi.responses import JSONResponse
|
|
|
|
|
|
class AIServiceError(Exception):
    """Root of the service's typed exception hierarchy.

    Subclasses override ``status_code`` and ``code``; the registered
    exception handler maps those class attributes onto the HTTP
    response, so raising any subclass anywhere in the app produces a
    consistent JSON error payload.
    """

    # HTTP status returned to the client when this error escapes a handler.
    status_code: int = 500
    # Machine-readable error identifier included in the response body.
    code: str = "INTERNAL_ERROR"

    def __init__(self, message: str) -> None:
        """Store *message* for the JSON payload and init ``Exception``."""
        super().__init__(message)
        self.message = message
|
|
|
|
|
|
class UnsupportedFileTypeError(AIServiceError):
    """Raised when an uploaded file's type is not accepted (HTTP 400)."""

    status_code = 400
    code = "UNSUPPORTED_FILE_TYPE"
|
|
|
|
|
|
class VideoTooLargeError(AIServiceError):
    """Raised when a video exceeds the configured size limit (HTTP 400)."""

    status_code = 400
    code = "VIDEO_TOO_LARGE"
|
|
|
|
|
|
class StorageError(AIServiceError):
    """Raised when the object-storage backend fails (HTTP 502 upstream error)."""

    status_code = 502
    code = "STORAGE_ERROR"
|
|
|
|
|
|
class LLMParseError(AIServiceError):
    """Raised when an LLM reply cannot be parsed into the expected shape (HTTP 502)."""

    status_code = 502
    code = "LLM_PARSE_ERROR"
|
|
|
|
|
|
class LLMCallError(AIServiceError):
    """Raised when the call to the LLM provider itself fails (HTTP 503)."""

    status_code = 503
    code = "LLM_CALL_ERROR"
|
|
|
|
|
|
async def ai_service_exception_handler(request: Request, exc: AIServiceError) -> JSONResponse:
    """Render an ``AIServiceError`` as a structured JSON error response.

    The status code and error code come from the exception's class
    attributes; the message is the one it was raised with.
    """
    payload = {"code": exc.code, "message": exc.message}
    return JSONResponse(status_code=exc.status_code, content=payload)
|
|
|
|
|
|
async def unhandled_exception_handler(request: Request, exc: Exception) -> JSONResponse:
    """Last-resort handler: log an unhandled exception and return a 500.

    Fix: previously the exception was silently converted into a response
    with no server-side record, losing the traceback entirely. The full
    traceback is now logged before responding; the response body is
    unchanged so existing clients and tests are unaffected.
    """
    # getLogger() caches by name, so the per-call lookup is cheap.
    logger = logging.getLogger(__name__)
    logger.error(
        "Unhandled exception on %s %s",
        request.method,
        request.url.path,
        exc_info=exc,
    )
    # NOTE(review): str(exc) can leak internal details to clients;
    # consider returning a generic message once consumers are confirmed.
    return JSONResponse(
        status_code=500,
        content={"code": "INTERNAL_ERROR", "message": str(exc)},
    )
|