diff --git a/maubot_llmplus/thrid_platform.py b/maubot_llmplus/thrid_platform.py index 7e6def3..2c93cc6 100644 --- a/maubot_llmplus/thrid_platform.py +++ b/maubot_llmplus/thrid_platform.py @@ -124,7 +124,7 @@ class XAi(Platform): def __init__(self, config: BaseProxyConfig, http: ClientSession) -> None: super().__init__(config, http) - def create_chat_completion(self, plugin: AbsExtraConfigPlugin, evt: MessageEvent) -> ChatCompletion: + async def create_chat_completion(self, plugin: AbsExtraConfigPlugin, evt: MessageEvent) -> ChatCompletion: full_context = [] context = await maubot_llmplus.platforms.get_context(plugin, self, evt) full_context.extend(list(context)) @@ -143,7 +143,7 @@ class XAi(Platform): request_body["temperature"] = self.temperature endpoint = f"{self.url}/v1/chat/completions" - with self.http.post(url=endpoint, data=json.dumps(request_body), headers=headers) as resp: + async with self.http.post(url=endpoint, data=json.dumps(request_body), headers=headers) as response: # plugin.log.debug(f"响应内容:{response.status}, {await response.json()}") if response.status != 200: return ChatCompletion( @@ -161,7 +161,7 @@ class XAi(Platform): pass - def list_models(self) -> List[str]: + async def list_models(self) -> List[str]: # 调用openai接口获取模型列表 full_url = f"{self.url}/v1/models" async with self.http.get(full_url) as response: