diff --git a/maubot_llmplus/aibot.py b/maubot_llmplus/aibot.py
index d044228..8acca0b 100644
--- a/maubot_llmplus/aibot.py
+++ b/maubot_llmplus/aibot.py
@@ -141,15 +141,15 @@ class AiBotPlugin(Plugin):
         if use_platform == 'local_ai':
             type = self.config['platforms']['local_ai']['type']
             if type == 'ollama':
-                return Ollama(self.config, self.name, self.http)
+                return Ollama(self.config, self.http)
             elif type == 'lmstudio':
-                return LmStudio(self.config, self.name, self.http)
+                return LmStudio(self.config, self.http)
             else:
                 raise ValueError(f"not found platform type: {type}")
         if use_platform == 'openai':
-            return OpenAi(self.config, self.name, self.http)
+            return OpenAi(self.config, self.http)
         if use_platform == 'anthropic':
-            return Anthropic(self.config, self.name, self.http)
+            return Anthropic(self.config, self.http)
         raise ValueError(f"unknown backend type {use_platform}")
 
     """
diff --git a/maubot_llmplus/local_paltform.py b/maubot_llmplus/local_paltform.py
index 7cee119..fb14e21 100644
--- a/maubot_llmplus/local_paltform.py
+++ b/maubot_llmplus/local_paltform.py
@@ -14,8 +14,8 @@ from maubot_llmplus.platforms import Platform, ChatCompletion
 class Ollama(Platform):
     chat_api: str
 
-    def __init__(self, config: BaseProxyConfig, name: str, http: ClientSession) -> None:
-        super().__init__(config, name, http)
+    def __init__(self, config: BaseProxyConfig, http: ClientSession) -> None:
+        super().__init__(config, http)
         self.chat_api = '/api/chat'
 
     async def create_chat_completion(self, plugin: Plugin, evt: MessageEvent) -> ChatCompletion:
@@ -55,8 +55,8 @@ class Ollama(Platform):
 
 
 class LmStudio(Platform):
-    def __init__(self, config: BaseProxyConfig, name: str, http: ClientSession) -> None:
-        super().__init__(config, name, http)
+    def __init__(self, config: BaseProxyConfig, http: ClientSession) -> None:
+        super().__init__(config, http)
         pass
 
     async def create_chat_completion(self, plugin: Plugin, evt: MessageEvent) -> ChatCompletion:
diff --git a/maubot_llmplus/platforms.py b/maubot_llmplus/platforms.py
index 9e4b2df..a118909 100644
--- a/maubot_llmplus/platforms.py
+++ b/maubot_llmplus/platforms.py
@@ -33,9 +33,8 @@ class Platform:
     additional_prompt: List[dict]
     system_prompt: str
     max_context_messages: int
-    name: str
 
-    def __init__(self, config: BaseProxyConfig, name: str, http: ClientSession) -> None:
+    def __init__(self, config: BaseProxyConfig, http: ClientSession) -> None:
         self.http = http
         self.config = config['platforms'][self.get_type()]
         self.url = self.config['url']
@@ -45,7 +44,6 @@ class Platform:
         self.max_context_messages = self.config['max_context_messages']
         self.additional_prompt = config['additional_prompt']
         self.system_prompt = config['system_prompt']
-        self.name = name
 
     """a
     调用AI对话接口, 响应结果
diff --git a/maubot_llmplus/thrid_platform.py b/maubot_llmplus/thrid_platform.py
index d7691e9..8dd03cb 100644
--- a/maubot_llmplus/thrid_platform.py
+++ b/maubot_llmplus/thrid_platform.py
@@ -8,8 +8,8 @@ from maubot_llmplus.platforms import Platform, ChatCompletion
 
 
 class OpenAi(Platform):
-    def __init__(self, config: BaseProxyConfig, name: str, http: ClientSession) -> None:
-        super().__init__(config, name, http)
+    def __init__(self, config: BaseProxyConfig, http: ClientSession) -> None:
+        super().__init__(config, http)
 
     async def create_chat_completion(self, plugin: Plugin, evt: MessageEvent) -> ChatCompletion:
         # 获取系统提示词
@@ -23,8 +23,8 @@ class OpenAi(Platform):
 
 
 class Anthropic(Platform):
-    def __init__(self, config: BaseProxyConfig, name: str, http: ClientSession) -> None:
-        super().__init__(config, name, http)
+    def __init__(self, config: BaseProxyConfig, http: ClientSession) -> None:
+        super().__init__(config, http)
 
     async def create_chat_completion(self, plugin: Plugin, evt: MessageEvent) -> ChatCompletion:
         # 获取系统提示词