api_ollama.py

# -*- coding: utf-8 -*-
import time

from ollama import Client as oClient


class ChatBot:
    """Thin wrapper around the Ollama chat API."""

    def call_ollama(self, host, messages, model='llava:13b', temperature=0.4):
        """Send a single chat request to an Ollama server and return the reply text."""
        self.client = oClient(host=host)
        self.model = model
        self.messages = messages
        self.temperature = temperature
        print(f'use model: {self.model}')
        try:
            # Non-streaming call: the complete reply comes back in one response object.
            response = self.client.chat(
                model=self.model,
                messages=[
                    {'role': 'system', 'content': 'You are a news-curation assistant'},
                    {'role': 'user', 'content': self.messages}
                ],
                options={"temperature": self.temperature},
                stream=False)
            return response['message']['content']
        except Exception as e:
            print(f"\nAn error occurred: {e}")
            return None
if __name__ == "__main__":
    C = ChatBot()
    start_time = time.time()

    response_context = C.call_ollama('http://127.0.0.1:11434', 'hello, how are you?', 'llava:13b')
    print(response_context)

    end_time = time.time()
    run_time = end_time - start_time
    print(f"Elapsed time: {run_time} seconds\n")
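
The class above only makes a blocking, non-streaming request. As a minimal sketch of a streaming variant (the stream_ollama name and the example host URL are assumptions, not part of the original module), the same client.chat() call can be made with stream=True so partial replies can be handled as they arrive:

from ollama import Client as oClient

def stream_ollama(host, prompt, model='llava:13b', temperature=0.4):
    """Yield the assistant's reply incrementally instead of waiting for the full response."""
    client = oClient(host=host)
    # With stream=True, chat() returns an iterator of partial responses.
    for chunk in client.chat(model=model,
                             messages=[{'role': 'user', 'content': prompt}],
                             options={"temperature": temperature},
                             stream=True):
        yield chunk['message']['content']

# Example usage (assumes a local Ollama server at http://127.0.0.1:11434):
# for piece in stream_ollama('http://127.0.0.1:11434', 'hello, how are you?'):
#     print(piece, end='', flush=True)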