refactor: finetune through LLMClient interface + get_running_loop
- Add submit_finetune and get_finetune_status abstract methods to LLMClient base
- Implement both methods in ZhipuAIClient using asyncio.get_running_loop()
- Rewrite finetune_service to call llm.submit_finetune / llm.get_finetune_status instead of accessing llm._client directly, restoring interface encapsulation
- Replace asyncio.get_event_loop() with get_running_loop() in ZhipuAIClient._call and all four methods in RustFSClient (deprecated in Python 3.10+)
- Update test_finetune_service to mock the LLMClient interface methods as AsyncMocks
- Add two new tests in test_llm_client for submit_finetune and get_finetune_status
This commit is contained in:
@@ -9,3 +9,11 @@ class LLMClient(ABC):
|
||||
@abstractmethod
async def chat_vision(self, model: str, messages: list[dict]) -> str:
    """Send a multimodal (vision) chat request and return the response content string.

    model: name of the vision-capable model to invoke.
    messages: chat messages in the provider's dict format (may include image parts).
    """
|
||||
|
||||
@abstractmethod
async def submit_finetune(self, jsonl_url: str, base_model: str, hyperparams: dict) -> str:
    """Submit a fine-tune job and return the job_id.

    jsonl_url: location of the JSONL training file to submit.
    base_model: identifier of the base model to fine-tune.
    hyperparams: provider-specific hyperparameter mapping passed through as-is.
    """
|
||||
|
||||
@abstractmethod
async def get_finetune_status(self, job_id: str) -> dict:
    """Return a dict with keys: job_id, status (raw SDK string), progress (int|None), error_message (str|None).

    job_id: identifier previously returned by submit_finetune.
    """
|
||||
|
||||
@@ -19,8 +19,39 @@ class ZhipuAIClient(LLMClient):
|
||||
async def chat_vision(self, model: str, messages: list[dict]) -> str:
    """Forward a multimodal chat request to the shared _call helper."""
    content = await self._call(model, messages)
    return content
|
||||
|
||||
async def submit_finetune(self, jsonl_url: str, base_model: str, hyperparams: dict) -> str:
    """Submit a fine-tune job through the blocking SDK and return its job_id.

    The SDK call runs in the default thread-pool executor so the event loop
    is never blocked. Any SDK failure is re-raised as LLMCallError.
    """
    event_loop = asyncio.get_running_loop()

    def _create_job():
        # Blocking SDK call; executed off the event loop.
        return self._client.fine_tuning.jobs.create(
            training_file=jsonl_url,
            model=base_model,
            hyperparameters=hyperparams,
        )

    try:
        job = await event_loop.run_in_executor(None, _create_job)
        return job.id
    except Exception as exc:
        raise LLMCallError(f"微调任务提交失败: {exc}") from exc
|
||||
|
||||
async def get_finetune_status(self, job_id: str) -> dict:
    """Fetch the state of a fine-tune job from the blocking SDK.

    Returns a dict with keys job_id, status (raw SDK string),
    progress (int or None) and error_message (str or None).
    Any SDK failure is re-raised as LLMCallError.
    """
    event_loop = asyncio.get_running_loop()
    try:
        job = await event_loop.run_in_executor(
            None,
            lambda: self._client.fine_tuning.jobs.retrieve(job_id),
        )
        # SDKs omit these fields on some job states, so read them defensively.
        raw_progress = getattr(job, "progress", None)
        return {
            "job_id": job.id,
            "status": job.status,
            "progress": None if raw_progress is None else int(raw_progress),
            "error_message": getattr(job, "error_message", None),
        }
    except Exception as exc:
        raise LLMCallError(f"查询微调任务失败: {exc}") from exc
|
||||
|
||||
async def _call(self, model: str, messages: list[dict]) -> str:
|
||||
loop = asyncio.get_event_loop()
|
||||
loop = asyncio.get_running_loop()
|
||||
try:
|
||||
response = await loop.run_in_executor(
|
||||
None,
|
||||
|
||||
Reference in New Issue
Block a user