add: 添加ollama调用AI chat逻辑
This commit is contained in:
@@ -24,19 +24,20 @@ class Ollama(Platform):
|
|||||||
|
|
||||||
endpoint = f"{self.url}/api/chat"
|
endpoint = f"{self.url}/api/chat"
|
||||||
req_body = {'model': self.model, 'messages': full_context, 'stream': False}
|
req_body = {'model': self.model, 'messages': full_context, 'stream': False}
|
||||||
headers = {}
|
headers = {'Content-Type': 'application/json'}
|
||||||
if self.api_key is not None:
|
if self.api_key is not None:
|
||||||
headers['Authorization'] = self.api_key
|
headers['Authorization'] = self.api_key
|
||||||
headers['Content-Type'] = 'application/json'
|
|
||||||
plugin.log.debug(f"{json.dumps(req_body)}")
|
plugin.log.debug(f"{json.dumps(req_body)}")
|
||||||
async with self.http.post(endpoint, headers=headers, data=json.dumps(req_body)) as response:
|
async with self.http.post(endpoint, headers=headers, data=json.dumps(req_body)) as response:
|
||||||
plugin.log.debug(f"响应内容:{response.status}, {response.json()}")
|
plugin.log.debug(f"响应内容:{response.status}, {await response.json()}")
|
||||||
if response.status != 200:
|
if response.status != 200:
|
||||||
return ChatCompletion(
|
return ChatCompletion(
|
||||||
message={},
|
message={},
|
||||||
finish_reason=f"http status {response.status}",
|
finish_reason=f"http status {response.status}",
|
||||||
model=None
|
model=None
|
||||||
)
|
)
|
||||||
|
text = await response.text()
|
||||||
|
plugin.log.debug(f"解析后的响应内容: {text}")
|
||||||
response_json = await response.json()
|
response_json = await response.json()
|
||||||
return ChatCompletion(
|
return ChatCompletion(
|
||||||
message=response_json['message'],
|
message=response_json['message'],
|
||||||
|
|||||||
Reference in New Issue
Block a user