diff --git a/base-config.yaml b/base-config.yaml
index 7d58668..1703005 100644
--- a/base-config.yaml
+++ b/base-config.yaml
@@ -31,6 +31,12 @@ platforms:
     max_tokens: 2000
     max_words: 1000
     max_context_messages: 20
+  deepseek:
+    url: https://api.deepseek.com
+    api_key:
+    model:
+    max_words: 1000
+    max_context_messages: 20
   openai:
     url: https://api.openai.com
     api_key:
diff --git a/maubot_llmplus/thrid_platform.py b/maubot_llmplus/thrid_platform.py
index e8665cc..b8d89e3 100644
--- a/maubot_llmplus/thrid_platform.py
+++ b/maubot_llmplus/thrid_platform.py
@@ -12,6 +12,62 @@
 from maubot_llmplus.platforms import Platform, ChatCompletion
 from maubot_llmplus.plugin import AbsExtraConfigPlugin
 
+class Deepseek(Platform):
+    """Platform adapter for the DeepSeek chat-completions HTTP API."""
+
+    def __init__(self, config: BaseProxyConfig, http: ClientSession):
+        super().__init__(config, http)
+
+    async def create_chat_completion(self, plugin: AbsExtraConfigPlugin, evt: MessageEvent) -> ChatCompletion:
+        # Collect the conversation context to send as the message history.
+        full_context = list(await maubot_llmplus.platforms.get_context(plugin, self, evt))
+
+        headers = {
+            "Content-Type": "application/json",
+            "Authorization": f"Bearer {self.api_key}"
+        }
+        data = {
+            "model": self.model,
+            "messages": full_context,
+        }
+
+        endpoint = f"{self.url}/chat/completions"
+        async with self.http.post(
+            endpoint, headers=headers, data=json.dumps(data)
+        ) as response:
+            if response.status != 200:
+                return ChatCompletion(
+                    result=False,
+                    message={},
+                    finish_reason=f"Error: {await response.text()}",
+                    model=None
+                )
+            response_json = await response.json()
+            choices = response_json.get("choices") or []
+            if not choices:
+                # A 200 reply without choices would otherwise raise IndexError.
+                return ChatCompletion(
+                    result=False,
+                    message={},
+                    finish_reason="Error: response contained no choices",
+                    model=response_json.get("model", None)
+                )
+            choice = choices[0]
+            return ChatCompletion(
+                result=True,
+                message=choice["message"],
+                finish_reason=choice["finish_reason"],
+                model=response_json.get("model", None)
+            )
+
+    async def list_models(self) -> List[str]:
+        # DeepSeek exposes no model-listing endpoint; return the known models.
+        models = ["deepseek-chat", "deepseek-reasoner"]
+        return [f"- {m}" for m in models]
+
+    def get_type(self) -> str:
+        return "deepseek"
+
 class OpenAi(Platform):
     max_tokens: int
     temperature: int