# api_ollama.py
# -*- coding: utf-8 -*-
import time
from ollama import Client as oClient
  4. class OllamaChat(object):
  5. def call_ollama(self, host, text, prompt_words, model='llava:13b', temperature=0.4):
  6. messages = text + '\n\n' + prompt_words
  7. self.client = oClient(host=host)
  8. self.model = model
  9. self.messages = messages
  10. self.temperature = temperature
  11. print(f'use model: {self.model}')
  12. try:
  13. response_iter = self.client.chat(model=self.model,
  14. messages=[
  15. {'role': 'system', 'content': '你是一个新闻报播员'},
  16. {'role': 'user', 'content': self.messages}
  17. ],
  18. options={"temperature": self.temperature},
  19. stream=False)
  20. return response_iter['message']['content']
  21. except Exception as e:
  22. print(f"\n发生错误: {e}")
# if __name__ == "__main__":
#     C = OllamaChat()
#     start_time = time.time()
#
#     response_context = C.call_ollama('http://127.0.0.1:11434', 'hello,你好呀',
#                                      '请播报这条消息', model='llava:13b')
#     print(response_context)
#
#     end_time = time.time()
#     run_time = end_time - start_time
#     print(f"程序运行时间:{run_time} 秒\n")