feat: Phase 1+2 — project setup and core infrastructure

- requirements.txt, config.yaml, .env, Dockerfile, docker-compose.yml
- app/core: config (YAML+env override), logging (JSON structured),
  exceptions (typed hierarchy), json_utils (Markdown fence stripping)
- app/clients: LLMClient ABC + ZhipuAIClient (run_in_executor),
  StorageClient ABC + RustFSClient (boto3 head_object for size check)
- app/main.py: FastAPI app with health endpoint and router registration
- app/core/dependencies.py: lru_cache singleton factories
- tests/conftest.py: mock_llm, mock_storage, test_app, client fixtures
- pytest.ini: asyncio_mode=auto
- 11 unit tests passing
This commit is contained in:
wh
2026-04-10 15:22:45 +08:00
parent 4162d9f4e6
commit e1eb5e47b1
54 changed files with 716 additions and 0 deletions

View File

Binary file not shown.

View File

@@ -0,0 +1,21 @@
from abc import ABC, abstractmethod
class StorageClient(ABC):
    """Abstract async interface for object-storage backends.

    Implementations wrap a concrete store (S3-compatible, local disk, ...)
    and expose byte-oriented upload/download plus presigned-URL and
    metadata helpers. All methods are coroutines.
    """

    @abstractmethod
    async def download_bytes(self, bucket: str, path: str) -> bytes:
        """Download an object and return its raw bytes."""
        ...

    @abstractmethod
    async def upload_bytes(
        self, bucket: str, path: str, data: bytes, content_type: str = "application/octet-stream"
    ) -> None:
        """Upload raw bytes to the given bucket/path."""
        ...

    @abstractmethod
    async def get_presigned_url(self, bucket: str, path: str, expires: int = 3600) -> str:
        """Return a presigned GET URL valid for `expires` seconds."""
        ...

    @abstractmethod
    async def get_object_size(self, bucket: str, path: str) -> int:
        """Return the object size in bytes without downloading it."""
        ...

View File

@@ -0,0 +1,70 @@
import asyncio
import io
import boto3
from botocore.exceptions import ClientError
from app.clients.storage.base import StorageClient
from app.core.exceptions import StorageError
from app.core.logging import get_logger
logger = get_logger(__name__)
class RustFSClient(StorageClient):
    """S3-compatible storage client for RustFS, backed by boto3.

    boto3 is synchronous, so every S3 call (including reading the response
    body stream) is offloaded to the default thread-pool executor to keep
    the event loop unblocked. All failures are normalized to StorageError.
    """

    def __init__(self, endpoint: str, access_key: str, secret_key: str) -> None:
        # One boto3 client per instance. boto3 low-level clients are
        # thread-safe, so sharing this across executor threads is fine.
        self._s3 = boto3.client(
            "s3",
            endpoint_url=endpoint,
            aws_access_key_id=access_key,
            aws_secret_access_key=secret_key,
        )

    async def _in_executor(self, call):
        """Run a zero-argument blocking callable in the default executor.

        Uses get_running_loop() rather than the deprecated get_event_loop():
        inside a coroutine there is always a running loop, and get_event_loop()
        emits a DeprecationWarning (and may misbehave) on modern Python.
        """
        loop = asyncio.get_running_loop()
        return await loop.run_in_executor(None, call)

    async def download_bytes(self, bucket: str, path: str) -> bytes:
        """Download an object and return its raw bytes.

        Raises:
            StorageError: if the underlying S3 call fails.
        """
        try:
            # Read the StreamingBody inside the executor too: .read() performs
            # blocking network I/O and must not run on the event loop.
            return await self._in_executor(
                lambda: self._s3.get_object(Bucket=bucket, Key=path)["Body"].read()
            )
        except ClientError as exc:
            raise StorageError(f"存储下载失败 [{bucket}/{path}]: {exc}") from exc

    async def upload_bytes(
        self, bucket: str, path: str, data: bytes, content_type: str = "application/octet-stream"
    ) -> None:
        """Upload raw bytes to the given bucket/path.

        Raises:
            StorageError: if the underlying S3 call fails.
        """
        try:
            await self._in_executor(
                lambda: self._s3.put_object(
                    Bucket=bucket, Key=path, Body=io.BytesIO(data), ContentType=content_type
                )
            )
        except ClientError as exc:
            raise StorageError(f"存储上传失败 [{bucket}/{path}]: {exc}") from exc

    async def get_presigned_url(self, bucket: str, path: str, expires: int = 3600) -> str:
        """Return a presigned GET URL valid for `expires` seconds.

        Raises:
            StorageError: if URL generation fails.
        """
        try:
            return await self._in_executor(
                lambda: self._s3.generate_presigned_url(
                    "get_object",
                    Params={"Bucket": bucket, "Key": path},
                    ExpiresIn=expires,
                )
            )
        except ClientError as exc:
            raise StorageError(f"生成预签名 URL 失败 [{bucket}/{path}]: {exc}") from exc

    async def get_object_size(self, bucket: str, path: str) -> int:
        """Return the object size in bytes without downloading it (HEAD request).

        Raises:
            StorageError: if the underlying S3 call fails.
        """
        try:
            resp = await self._in_executor(
                lambda: self._s3.head_object(Bucket=bucket, Key=path)
            )
            return resp["ContentLength"]
        except ClientError as exc:
            raise StorageError(f"获取文件大小失败 [{bucket}/{path}]: {exc}") from exc