修复错误代码

This commit is contained in:
taylorxie
2026-03-09 17:12:27 +08:00
parent 17c18b48dc
commit 66881b0d91
3 changed files with 31 additions and 9 deletions

View File

@@ -130,10 +130,18 @@ class AiBotPlugin(AbsExtraConfigPlugin):
# 关闭typing提示
await self.client.set_typing(event.room_id, timeout=0)
# 打开typing提示
resp_content = chat_completion.message['content']
response = TextMessageEventContent(msgtype=MessageType.TEXT, body=resp_content, format=Format.HTML,
formatted_body=markdown.render(resp_content))
await event.respond(response, in_thread=self.config['reply_in_thread'])
if chat_completion.message['result']:
if hasattr(chat_completion.message, 'content'):
resp_content = chat_completion.message['content']
response = TextMessageEventContent(msgtype=MessageType.TEXT, body=resp_content, format=Format.HTML,
formatted_body=markdown.render(resp_content))
await event.respond(response, in_thread=self.config['reply_in_thread'])
else:
resp_content = "调用失败,请检查: " + chat_completion.finish_reason
response = TextMessageEventContent(msgtype=MessageType.TEXT, body=resp_content, format=Format.HTML,
formatted_body=markdown.render(resp_content))
await event.respond(response, in_thread=self.config['reply_in_thread'])
except Exception as e:
self.log.exception(f"Something went wrong: {e}")
await event.respond(f"Something went wrong: {e}")

View File

@@ -15,7 +15,8 @@ from maubot_llmplus.plugin import AbsExtraConfigPlugin, Config
class ChatCompletion:
    """Normalized result of one chat-completion API call.

    Attributes:
        result: True when the upstream call succeeded; False on an HTTP error.
        message: provider message dict (e.g. {"role": ..., "content": ...});
            an empty dict when the call failed.
        finish_reason: provider finish reason, or an error description
            ("Error: ...") when the call failed.
        model: model name reported by the provider, if any.
    """

    def __init__(self, result: bool, message: dict, finish_reason: str, model: Optional[str]) -> None:
        self.result = result
        self.message = message
        self.finish_reason = finish_reason
        self.model = model

View File

@@ -53,6 +53,7 @@ class OpenAi(Platform):
# plugin.log.debug(f"响应内容:{response.status}, {await response.json()}")
if response.status != 200:
return ChatCompletion(
result=False,
message={},
finish_reason=f"Error: {await response.text()}",
model=None
@@ -60,6 +61,7 @@ class OpenAi(Platform):
response_json = await response.json()
choice = response_json["choices"][0]
return ChatCompletion(
result=True,
message=choice["message"],
finish_reason=choice["finish_reason"],
model=choice.get("model", None)
@@ -101,6 +103,7 @@ class Anthropic(Platform):
# plugin.log.debug(f"响应内容:{response.status}, {await response.json()}")
if response.status != 200:
return ChatCompletion(
result=False,
message={},
finish_reason=f"Error: {await response.text()}",
model=None
@@ -108,6 +111,7 @@ class Anthropic(Platform):
response_json = await response.json()
text = "\n\n".join(c["text"] for c in response_json["content"])
return ChatCompletion(
result=True,
message=dict(role="assistant", content=text),
finish_reason=response_json['stop_reason'],
model=response_json['model']
@@ -115,10 +119,17 @@ class Anthropic(Platform):
pass
async def list_models(self) -> List[str]:
    """Fetch the model ids available from the Anthropic API.

    Calls GET {self.url}/v1/models and formats each model id as a
    markdown bullet line ("- <id>").

    Returns:
        One "- <id>" string per model, or an empty list when the request
        fails (non-200 status) — best-effort rather than raising.
    """
    full_url = f"{self.url}/v1/models"
    headers = {
        # Required API-version header for Anthropic endpoints.
        'anthropic-version': "2023-06-01",
        'X-Api-Key': f"{self.api_key}"
    }
    async with self.http.get(full_url, headers=headers) as response:
        if response.status != 200:
            return []
        response_data = await response.json()
        return [f"- {m['id']}" for m in response_data['data']]
def get_type(self) -> str:
    """Return the platform identifier string for this backend ("anthropic")."""
    return "anthropic"
@@ -159,6 +170,7 @@ class XAi(Platform):
# plugin.log.debug(f"响应内容:{response.status}, {await response.json()}")
if response.status != 200:
return ChatCompletion(
result=False,
message={},
finish_reason=f"Error: {await response.text()}",
model=None
@@ -166,6 +178,7 @@ class XAi(Platform):
response_json = await response.json()
choice = response_json["choices"][0]
return ChatCompletion(
result=True,
message=choice["message"],
finish_reason=choice["finish_reason"],
model=response_json["model"]