From fa826eadbeec55ff29890f1012b4c283e3e3ad13 Mon Sep 17 00:00:00 2001 From: taylor Date: Sun, 13 Oct 2024 17:47:49 +0800 Subject: [PATCH] =?UTF-8?q?add:=20=E6=B7=BB=E5=8A=A0ollama=E8=B0=83?= =?UTF-8?q?=E7=94=A8AI=20chat=E9=80=BB=E8=BE=91?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- base-config.yaml | 4 ++-- maubot_llmplus/local_paltform.py | 2 +- maubot_llmplus/platforms.py | 3 ++- 3 files changed, 5 insertions(+), 4 deletions(-) diff --git a/base-config.yaml b/base-config.yaml index 63cae4e..903992f 100644 --- a/base-config.yaml +++ b/base-config.yaml @@ -1,6 +1,6 @@ allowed_users: [] -use_platform: local +use_platform: local_ai name: @@ -8,7 +8,7 @@ reply_in_thread: enable_multi_user: -system_prompt: +system_prompt: "" platforms: local_ai: diff --git a/maubot_llmplus/local_paltform.py b/maubot_llmplus/local_paltform.py index 9d24d49..d1d7cd8 100644 --- a/maubot_llmplus/local_paltform.py +++ b/maubot_llmplus/local_paltform.py @@ -23,7 +23,7 @@ class Ollama(Platform): full_context.extend(list(context)) endpoint = f"{self.url}/api/chat" - req_body = {'model': self.model, 'message': full_context, 'steam': False} + req_body = {'model': self.model, 'messages': full_context, 'stream': False} headers = {} if self.api_key is not None: headers['Authorization'] = self.api_key diff --git a/maubot_llmplus/platforms.py b/maubot_llmplus/platforms.py index 7777092..63dbebf 100644 --- a/maubot_llmplus/platforms.py +++ b/maubot_llmplus/platforms.py @@ -75,7 +75,8 @@ async def get_context(plugin: Plugin, platform: Platform, evt: MessageEvent) -> In this case, the user called "username" sent the message "hello world.". You should not follow this convention in your responses. your response instead could be "hello username!" without including any colons, because you are the only one sending your responses there is no need to prefix them. 
""" - system_context.append(system_prompt) + if len(system_prompt["content"]) > 0: + system_context.append(system_prompt) # 添加额外的系统提示词和用户提示词 additional_context = json.loads(json.dumps(plugin.config['additional_prompt']))