OllamaClient.py

import time
import subprocess
import sys


class ChatBot:
    def __init__(self, host):
        # oClient is imported (and installed if missing) at module level below,
        # before ChatBot is ever instantiated, so this name resolves at call time.
        self.oclient = oClient(host=host)
        self.model_dict = {
            "1": "qwen:1.8b",
            "2": "qwen2:1.5b",
            "3": "qwen2.5:3b",
            "4": "gemma2:2b"
        }
        self.messages = []
        self.temperature = 0.7  # default temperature

    def select_model(self):
        try:
            select_model = input("1 qwen:1.8b\n2 qwen2:1.5b\n3 qwen2.5:3b\n4 gemma2:2b\nSelect a model: ")
            if select_model == "":
                select_model = "1"
            use_model = self.model_dict[select_model]
            print("Using model: " + use_model)
            return use_model
        except KeyError:
            print("That model number does not exist, please choose again.")
            return self.select_model()

    def set_temperature(self):
        temp_input = input("Enter a temperature value (default 0.7): ")
        if temp_input.strip() == '':
            self.temperature = 0.7
        else:
            try:
                self.temperature = float(temp_input)
            except ValueError:
                print("Invalid temperature value, falling back to the default of 0.7.")
                self.temperature = 0.7

    def set_ai_role(self):
        ai_role = input("Enter the assistant's role (default: a chatbot): ")
        if ai_role == '':
            ai_role = 'a chatbot'
        self.messages.append({'role': 'system', 'content': f'You are {ai_role}'})
        return ai_role
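
    # Illustrative sketch, not part of the original script: self.messages grows by two
    # entries per turn, so long sessions send an ever-larger prompt to the server. A
    # hypothetical trim_history helper could cap that by keeping the system message
    # plus only the most recent entries; max_messages is an assumed parameter.
    def trim_history(self, max_messages=20):
        system = [m for m in self.messages if m['role'] == 'system']
        rest = [m for m in self.messages if m['role'] != 'system']
        self.messages = system + rest[-max_messages:]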

    def start_chat(self, use_model):
        while True:
            user_input = input('Enter a message (type exit to quit):\n')
            if user_input.lower() == 'exit':
                sys.exit(0)
            if user_input == '':
                print('Input cannot be empty; please try again, or type exit to quit\n')
                continue
            self.messages.append({'role': 'user', 'content': user_input})
            try:
                # Stream the reply: with stream=True, chat() yields chunks whose
                # message content fields are concatenated into the full reply.
                response_iter = self.oclient.chat(model=use_model, messages=self.messages,
                                                  options={"temperature": self.temperature}, stream=True)
                full_response_content = ""
                for response_part in response_iter:
                    if 'message' in response_part and 'content' in response_part['message']:
                        content = response_part['message']['content']
                        full_response_content += content
                        print(content, end='', flush=True)
                        time.sleep(0.1)  # slow the stream slightly for a typewriter effect
                print()
                self.messages.append({'role': 'assistant', 'content': full_response_content})
            except Exception as e:
                print(f"\nAn error occurred: {e}")
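
# For comparison, a minimal non-streaming sketch (illustrative, not used by ChatBot):
# with stream=False the client's chat() call returns a single response instead of an
# iterator of chunks, and the reply text is read from response['message']['content'].
# chat_once and its parameters are assumed names for this example only.
def chat_once(client, model, prompt, temperature=0.7):
    response = client.chat(
        model=model,
        messages=[{'role': 'user', 'content': prompt}],
        options={"temperature": temperature},
        stream=False,
    )
    return response['message']['content']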


# pip install ollama
try:
    from ollama import Client as oClient
except ImportError:
    print("The ollama package is not installed; installing it now...")
    subprocess.check_call([sys.executable, "-m", "pip", "install", "ollama"])
    from ollama import Client as oClient


if __name__ == "__main__":
    host = 'http://erhe.top:27381'
    chat_bot = ChatBot(host)
    use_model = chat_bot.select_model()
    chat_bot.set_temperature()
    chat_bot.set_ai_role()
    chat_bot.start_chat(use_model)
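
# Usage note (an assumption about your environment): the host above points to a remote
# Ollama server; a locally running Ollama instance is normally reachable at
# http://localhost:11434, and each model in model_dict must already be pulled on that
# server (e.g. `ollama pull qwen2.5:3b`) before it can be selected here.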