api_ollama.py

# -*- coding: utf-8 -*-
import time

from ollama import Client as oClient
'''
Available models:
qwen:1.8b
qwen2:1.5b
qwen2.5:3b
gemma2:2b
'''
model_list = [
    'qwen:1.8b',
    'qwen2:1.5b',
    'qwen2.5:3b',
    'gemma2:2b',
]
class ChatBot:
    def __init__(self, host, messages, model='qwen:1.8b', temperature=0.4):
        self.client = oClient(host=host)
        self.model = model
        self.messages = messages
        self.temperature = temperature

    def start_chat(self):
        print(f'use model: {self.model}')
        try:
            # stream=False: chat() returns a single response dict rather than an iterator.
            response = self.client.chat(
                model=self.model,
                messages=[
                    # Persona prompt ("You are a news-organizing assistant"), sent as the system message.
                    {'role': 'system', 'content': '你是一个新闻整理专员'},
                    {'role': 'user', 'content': self.messages}
                ],
                options={'temperature': self.temperature},
                stream=False)
            return response['message']['content']
        except Exception as e:
            print(f"\nError occurred: {e}")
# if __name__ == "__main__":
#     for m in model_list:
#         C = ChatBot('http://erhe.top:27381', 'hello,你好呀', m)
#         start_time = time.time()
#
#         response_context = C.start_chat()
#         print(response_context)
#
#         end_time = time.time()
#         run_time = end_time - start_time
#         print(f"Run time: {run_time} seconds\n")
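
# --- Optional: streaming usage sketch (not part of the original file) ---
# The ollama client can also stream the reply chunk by chunk when stream=True
# is passed to chat(); each chunk carries a partial 'message'['content'].
# This is a minimal sketch only, assuming the same host and model names used
# above; the stream_chat helper below is hypothetical and not wired into the
# ChatBot class.
#
# def stream_chat(host, prompt, model='qwen2.5:3b'):
#     client = oClient(host=host)
#     # stream=True makes chat() return an iterator of partial responses.
#     for chunk in client.chat(model=model,
#                              messages=[{'role': 'user', 'content': prompt}],
#                              stream=True):
#         # Print each partial piece of the reply as it arrives.
#         print(chunk['message']['content'], end='', flush=True)
#     print()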