This commit is contained in:
taylorxie
2026-03-09 21:22:00 +08:00
parent 94f16c4f8b
commit 04077c7f12
2 changed files with 52 additions and 0 deletions

View File

@@ -31,6 +31,12 @@ platforms:
max_tokens: 2000
max_words: 1000
max_context_messages: 20
deepseek:
url: https://api.deepseek.com
api_key:
model:
max_words: 1000
max_context_messages: 20
openai:
url: https://api.openai.com
api_key:

View File

@@ -12,6 +12,52 @@ from maubot_llmplus.platforms import Platform, ChatCompletion
from maubot_llmplus.plugin import AbsExtraConfigPlugin
class Deepseek(Platform):
    """Platform adapter for the DeepSeek chat-completions API.

    Speaks the OpenAI-compatible ``POST {url}/chat/completions`` protocol
    against the endpoint configured under ``platforms.deepseek``.
    """

    def __init__(self, config: BaseProxyConfig, http: ClientSession):
        # All connection settings (url, api_key, model, ...) are read by the
        # Platform base class from the proxy config.
        super().__init__(config, http)

    async def create_chat_completion(self, plugin: AbsExtraConfigPlugin, evt: MessageEvent) -> ChatCompletion:
        """Send the accumulated chat context to DeepSeek and wrap the reply.

        Returns a ChatCompletion with ``result=False`` (and diagnostic text
        in ``finish_reason``) on any non-200 response or a 200 response whose
        body lacks the expected ``choices`` list; otherwise ``result=True``
        with the first choice's message.
        """
        full_context = []
        # Conversation context (history etc.) is assembled by the shared
        # module-level helper so all platforms behave consistently.
        context = await maubot_llmplus.platforms.get_context(plugin, self, evt)
        full_context.extend(list(context))

        headers = {
            "Content-Type": "application/json",
            "Authorization": f"Bearer {self.api_key}",
        }
        data = {
            "model": self.model,
            "messages": full_context,
        }
        endpoint = f"{self.url}/chat/completions"
        async with self.http.post(
            endpoint, headers=headers, data=json.dumps(data)
        ) as response:
            if response.status != 200:
                return ChatCompletion(
                    result=False,
                    message={},
                    finish_reason=f"Error: {await response.text()}",
                    model=None
                )
            response_json = await response.json()
            # Guard against 200 responses with a malformed body (e.g. an
            # upstream gateway error serialized as JSON): without this the
            # original code crashed with KeyError/IndexError instead of
            # reporting a failed completion.
            choices = response_json.get("choices") or []
            if not choices:
                return ChatCompletion(
                    result=False,
                    message={},
                    finish_reason=f"Error: unexpected response body: {response_json}",
                    model=response_json.get("model", None)
                )
            choice = choices[0]
            return ChatCompletion(
                result=True,
                message=choice["message"],
                # .get(): some providers omit finish_reason on partial replies.
                finish_reason=choice.get("finish_reason"),
                model=response_json.get("model", None)
            )

    async def list_models(self) -> List[str]:
        """Return the supported DeepSeek model names, pre-formatted as
        markdown-style list items (``- <name>``)."""
        # Model list is hard-coded here rather than fetched from the API.
        models = ["deepseek-chat", "deepseek-reasoner"]
        return [f"- {m}" for m in models]

    def get_type(self) -> str:
        """Platform identifier used for config/dispatch lookup."""
        return "deepseek"
class OpenAi(Platform):
max_tokens: int
temperature: int