- Add submit_finetune and get_finetune_status abstract methods to LLMClient base - Implement both methods in ZhipuAIClient using asyncio.get_running_loop() - Rewrite finetune_service to call llm.submit_finetune / llm.get_finetune_status instead of accessing llm._client directly, restoring interface encapsulation - Replace asyncio.get_event_loop() with get_running_loop() in ZhipuAIClient._call and all four methods in RustFSClient (deprecated in Python 3.10+) - Update test_finetune_service to mock the LLMClient interface methods as AsyncMocks - Add two new tests in test_llm_client for submit_finetune and get_finetune_status
20 lines
794 B
Python
20 lines
794 B
Python
from abc import ABC, abstractmethod
|
|
|
|
|
|
class LLMClient(ABC):
    """Abstract interface for LLM provider clients.

    Defines the full surface the rest of the application is allowed to use:
    text chat, multimodal (vision) chat, and fine-tune job management.
    Concrete implementations (e.g. a ZhipuAI-backed client) must implement
    every method; callers never touch the underlying SDK directly, which
    keeps provider details encapsulated behind this interface.
    """

    @abstractmethod
    async def chat(self, model: str, messages: list[dict]) -> str:
        """Send a text chat request and return the response content string."""

    @abstractmethod
    async def chat_vision(self, model: str, messages: list[dict]) -> str:
        """Send a multimodal (vision) chat request and return the response content string."""

    @abstractmethod
    async def submit_finetune(self, jsonl_url: str, base_model: str, hyperparams: dict) -> str:
        """Submit a fine-tune job and return the job_id."""

    @abstractmethod
    async def get_finetune_status(self, job_id: str) -> dict:
        """Return a dict with keys: job_id, status (raw SDK string), progress (int|None), error_message (str|None)."""