diff --git a/maubot_llmplus/local_paltform.py b/maubot_llmplus/local_paltform.py
index 7ae2e5d..879ef20 100644
--- a/maubot_llmplus/local_paltform.py
+++ b/maubot_llmplus/local_paltform.py
@@ -96,7 +96,7 @@ class LmStudio(Platform):
 
     def __init__(self, config: BaseProxyConfig, http: ClientSession) -> None:
         super().__init__(config, http)
-        self.temperature = self.config['temperature']
+        self.temperature = float(self.config['temperature']) if self.config.get('temperature') is not None else None
         self.streaming = self.config.get('streaming', False)
 
     def is_streaming_enabled(self) -> bool:
diff --git a/maubot_llmplus/thrid_platform.py b/maubot_llmplus/thrid_platform.py
index d3d0f8d..56aa337 100644
--- a/maubot_llmplus/thrid_platform.py
+++ b/maubot_llmplus/thrid_platform.py
@@ -113,12 +113,12 @@ class Deepseek(Platform):
 
 class OpenAi(Platform):
     max_tokens: int
-    temperature: int
+    temperature: float
 
     def __init__(self, config: BaseProxyConfig, http: ClientSession) -> None:
         super().__init__(config, http)
-        self.max_tokens = self.config['max_tokens']
-        self.temperature = self.config['temperature']
+        self.max_tokens = int(self.config['max_tokens']) if self.config.get('max_tokens') else None
+        self.temperature = float(self.config['temperature']) if self.config.get('temperature') is not None else None
         self.streaming = self.config.get('streaming', False)
 
     def is_streaming_enabled(self) -> bool:
@@ -220,7 +220,7 @@ class Anthropic(Platform):
 
     def __init__(self, config: BaseProxyConfig, http: ClientSession) -> None:
         super().__init__(config, http)
-        self.max_tokens = self.config['max_tokens']
+        self.max_tokens = int(self.config['max_tokens']) if self.config.get('max_tokens') else None
         self.streaming = self.config.get('streaming', False)
 
     def is_streaming_enabled(self) -> bool:
@@ -314,8 +314,8 @@ class Gemini(Platform):
 
     def __init__(self, config: BaseProxyConfig, http: ClientSession) -> None:
         super().__init__(config, http)
-        self.max_tokens = self.config['max_tokens']
-        self.temperature = self.config['temperature']
+        self.max_tokens = int(self.config['max_tokens']) if self.config.get('max_tokens') else None
+        self.temperature = float(self.config['temperature']) if self.config.get('temperature') is not None else None
         self.streaming = self.config.get('streaming', False)
 
     def is_streaming_enabled(self) -> bool:
@@ -431,8 +431,8 @@ class XAi(Platform):
 
     def __init__(self, config: BaseProxyConfig, http: ClientSession) -> None:
         super().__init__(config, http)
-        self.temperature = self.config['temperature']
-        self.max_tokens = self.config['max_tokens']
+        self.temperature = float(self.config['temperature']) if self.config.get('temperature') is not None else None
+        self.max_tokens = int(self.config['max_tokens']) if self.config.get('max_tokens') else None
         self.streaming = self.config.get('streaming', False)
 
     def is_streaming_enabled(self) -> bool:
@@ -526,9 +526,9 @@ class Qwen(Platform):
 
     def __init__(self, config: BaseProxyConfig, http: ClientSession) -> None:
         super().__init__(config, http)
-        self.max_tokens = self.config['max_tokens']
-        self.temperature = self.config['temperature']
-        self.top_p = self.config['top_p']
+        self.max_tokens = int(self.config['max_tokens']) if self.config.get('max_tokens') else None
+        self.temperature = float(self.config['temperature']) if self.config.get('temperature') is not None else None
+        self.top_p = float(self.config['top_p']) if self.config.get('top_p') is not None else None
         self.enable_thinking = self.config['enable_thinking']
         self.streaming = self.config.get('streaming', False)