Files
maubot-llmplus/base-config.yaml
taylorxie 9f25fdab12 add
2026-03-09 22:43:02 +08:00

93 lines
2.0 KiB
YAML

# users allowed to interact with the bot
# NOTE(review): whether an empty list means allow-all or deny-all is not
# visible here — confirm against the plugin code
allowed_users: []
# users granted the update + read command permission
allow_update_read_command_users: []
# users granted read-only command permission
allow_readonly_command_users: []
# key under `platforms` selecting the active backend
use_platform: local_ai
# display name of the bot
name: "ai bot"
# reply inside a thread instead of the room timeline
reply_in_thread: true
# presumably keeps per-user conversation context — verify in plugin code
enable_multi_user: true
# system prompt sent to the model with every conversation
system_prompt: "response in chinese"
# platform config
# NOTE(review): these entries were flush-left in the original, which flattens
# them out of the `platforms` mapping and produces duplicate top-level keys
# (url, api_key, model, ...); re-indented so each platform nests correctly.
platforms:
  local_ai:
    type: ollama
    url: http://192.168.32.162:11434
    # no key required for a local ollama instance; explicit null instead of a
    # bare value (yamllint empty-values) — parsed value is unchanged
    api_key: null
    model: llama3.2
    temperature: 1
    max_tokens: 2000
    max_words: 1000
    max_context_messages: 20
  qwen:
    # Mainland China endpoint: https://dashscope.aliyuncs.com
    # International endpoint: https://dashscope-intl.aliyuncs.com
    url: https://dashscope.aliyuncs.com
    api_key: null
    model: qwen-plus
    temperature: 0.7
    top_p: 0.8
    max_tokens: 2000
    max_words: 1000
    max_context_messages: 20
    # enable deep-thinking mode (only supported by the qwq model series)
    enable_thinking: false
  deepseek:
    url: https://api.deepseek.com
    api_key: null
    # model intentionally unset — fill in (e.g. deepseek-chat) before use
    model: null
    max_words: 1000
    max_context_messages: 20
  gemini:
    url: https://generativelanguage.googleapis.com
    api_key: null
    model: gemini-2.0-flash
    temperature: 1
    max_tokens: 2000
    max_words: 1000
    max_context_messages: 20
  openai:
    url: https://api.openai.com
    api_key: null
    model: gpt-4o-mini
    max_tokens: 2000
    max_words: 1000
    max_context_messages: 20
    temperature: 1
  anthropic:
    url: https://api.anthropic.com
    api_key: null
    model: claude-3-5-sonnet-20240620
    max_words: 1000
    max_tokens: 2000
    max_context_messages: 20
    # enable streaming output (when on, the message in Element updates
    # incrementally as tokens arrive)
    streaming: false
  xai:
    url: https://api.x.ai
    api_key: null
    model: grok-beta
    temperature: 1
    # NOTE(review): max_tokens/max_words were transposed here (1000/2000)
    # relative to every other platform (2000/1000); aligned with the rest
    max_tokens: 2000
    max_words: 1000
    max_context_messages: 20
# additional prompt
# extra chat messages injected alongside the conversation; each item is a
# role/content pair in the usual chat-completion message shape.
# NOTE(review): the original had `content:` at column 0, detaching it from
# its `- role:` item — re-indented so each pair forms one list element.
additional_prompt:
  - role: user
    content: "What model is currently in use?"
  - role: system
    content: "you can response text contain user name"