Files
label_ai_service/app/services/finetune_service.py
wh 0880e1018c refactor: finetune through LLMClient interface + get_running_loop
- Add submit_finetune and get_finetune_status abstract methods to LLMClient base
- Implement both methods in ZhipuAIClient using asyncio.get_running_loop()
- Rewrite finetune_service to call llm.submit_finetune / llm.get_finetune_status
  instead of accessing llm._client directly, restoring interface encapsulation
- Replace asyncio.get_event_loop() with get_running_loop() in ZhipuAIClient._call
  and all four methods in RustFSClient (deprecated in Python 3.10+)
- Update test_finetune_service to mock the LLMClient interface methods as AsyncMocks
- Add two new tests in test_llm_client for submit_finetune and get_finetune_status
2026-04-10 16:43:28 +08:00

36 lines
1.2 KiB
Python

from app.clients.llm.base import LLMClient
from app.core.logging import get_logger
from app.models.finetune_models import (
FinetuneStartRequest,
FinetuneStartResponse,
FinetuneStatusResponse,
)
# Module-level logger, named after this module per project convention.
logger = get_logger(__name__)

# Maps provider-reported fine-tune statuses to the service's canonical
# status values. Lookups fall back to "RUNNING" for any unlisted status
# (see get_finetune_status below).
_STATUS_MAP = {
    "running": "RUNNING",
    "succeeded": "SUCCESS",
    "failed": "FAILED",
}
async def submit_finetune(req: FinetuneStartRequest, llm: LLMClient) -> FinetuneStartResponse:
    """Kick off a fine-tuning job through the injected LLMClient.

    Passes the dataset URL, base model, and hyperparameters from *req*
    to ``llm.submit_finetune``, logs the job ID the provider returned,
    and wraps it in a ``FinetuneStartResponse``.
    """
    # Normalize absent hyperparams to an empty dict before delegating.
    hyperparams = req.hyperparams or {}
    new_job_id = await llm.submit_finetune(req.jsonl_url, req.base_model, hyperparams)
    logger.info("finetune_submit", extra={"job_id": new_job_id, "model": req.base_model})
    return FinetuneStartResponse(job_id=new_job_id)
async def get_finetune_status(job_id: str, llm: LLMClient) -> FinetuneStatusResponse:
    """Look up the current state of a fine-tune job through the LLMClient.

    The provider-specific status string is normalized via ``_STATUS_MAP``;
    any status not present in the map is reported as "RUNNING".
    """
    raw = await llm.get_finetune_status(job_id)
    # Unknown provider statuses deliberately fall back to RUNNING.
    normalized = _STATUS_MAP.get(raw["status"], "RUNNING")
    logger.info("finetune_status", extra={"job_id": job_id, "status": normalized})
    return FinetuneStatusResponse(
        job_id=raw["job_id"],
        status=normalized,
        progress=raw["progress"],
        error_message=raw["error_message"],
    )