# utils_call_ollama.py
# -*- coding: utf-8 -*-
import time
from ollama import Client as oClient
  4. class OllamaChat(object):
  5. def call_ollama(self, host, role, text, prompt_words, model='llava:13b', temperature=0.4):
  6. message = text + '\n\n' + prompt_words
  7. print(f'use model: {model}')
  8. try:
  9. response_iter = oClient(host=host).chat(model=model,
  10. messages=[
  11. {'role': 'system', 'content': role},
  12. {'role': 'user', 'content': message}
  13. ],
  14. options={"temperature": temperature},
  15. stream=False)
  16. return response_iter['message']['content']
  17. except Exception as e:
  18. print(f"\n发生错误: {e}")
# if __name__ == "__main__":
#     C = OllamaChat()
#     start_time = time.time()
#
#     role = '你是一个聊天机器人'
#
#     text = 'hello'
#
#     prompt_words = '你好呀'
#
#     response_context = C.call_ollama('http://192.168.31.28:11434', role, text, prompt_words, model='llava:13b')
#     print(response_context)
#
#     end_time = time.time()
#     run_time = end_time - start_time
#     print(f"程序运行时间:{run_time} 秒\n")