# -*- coding: utf-8 -*-
import time

from ollama import Client as oClient


class OllamaChat:

    def call_ollama(self, host, role, text, prompt_words, model='llava:13b', temperature=0.4):
        # Combine the user text and prompt words into a single message
        message = text + '\n\n' + prompt_words
        print(f'use model: {model}')
        try:
            # Send a non-streaming chat request to the Ollama server
            response = oClient(host=host).chat(
                model=model,
                messages=[
                    {'role': 'system', 'content': role},
                    {'role': 'user', 'content': message}
                ],
                options={"temperature": temperature},
                stream=False)
            return response['message']['content']
        except Exception as e:
            print(f"\nError occurred: {e}")
            return None


if __name__ == "__main__":
    C = OllamaChat()
    start_time = time.time()

    role = 'You are a chatbot'
    text = 'hello'
    prompt_words = 'Hello there'

    response_context = C.call_ollama('http://192.168.31.28:11434', role, text, prompt_words, model='llava:13b')
    print(response_context)

    end_time = time.time()
    run_time = end_time - start_time
    print(f"Program runtime: {run_time} seconds\n")