jack 11 ay önce
işleme
1219947280
3 değiştirilmiş dosya ile 150 ekleme ve 0 silme
  1. 63 0
      .gitignore
  2. 87 0
      OllamaClient.py
  3. BIN
      ollama.ico

+ 63 - 0
.gitignore

@@ -0,0 +1,63 @@
+.DS_Store
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+env/
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+*.egg-info/
+.installed.cfg
+*.egg
+.idea/*
+xml_files/
+
+# PyInstaller
+#  Usually these files are written by a python script from a template
+#  before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+target/
+
+other/split_clash_config/split_config

+ 87 - 0
OllamaClient.py

@@ -0,0 +1,87 @@
+import time
+import subprocess
+import sys
+
+class ChatBot:
+    def __init__(self, host):
+        self.oclient = oClient(host=host)
+        self.model_dict = {
+            "1": "qwen:1.8b",
+            "2": "qwen2:1.5b",
+            "3": "qwen2.5:3b",
+            "4": "gemma2:2b"
+        }
+        self.messages = []
+        self.temperature = 0.7  # 默认温度
+
+    def select_model(self):
+        try:
+            select_model = input("1 qwen:1.8b\n2 qwen2:1.5b\n3 qwen2.5:3b\n4 gemma2:2b\n请选择模型: ")
+            if select_model == "":
+                select_model = "1"
+            use_model = self.model_dict[select_model]
+            print("使用模型: " + use_model)
+            return use_model
+        except KeyError:
+            print("选择的模型编号不存在,请重新选择。")
+            return self.select_model()
+
+    def set_temperature(self):
+        temp_input = input("请输入temperature参数(默认为0.7):")
+        if temp_input.strip() == '':
+            self.temperature = 0.7
+        else:
+            try:
+                self.temperature = float(temp_input)
+            except ValueError:
+                print("输入的temperature参数无效,使用默认值0.7。")
+                self.temperature = 0.7
+
+    def set_ai_role(self):
+        ai_role = input("输入机器人角色(默认为聊天机器人): ")
+        if ai_role == '':
+            ai_role = '聊天机器人'
+        self.messages.append({'role': 'system', 'content': f'你是{ai_role}'})
+        return ai_role
+
+    def start_chat(self, use_model):
+        while True:
+            user_input = input('输入对话内容(输入exit退出):\n')
+            if user_input.lower() == 'exit':
+                exit(0)
+            if user_input == '':
+                print('输入内容不能为空, 请重新输入, 或输入exit退出\n')
+                continue
+            self.messages.append({'role': 'user', 'content': user_input})
+            try:
+                response_iter = self.oclient.chat(model=use_model, messages=self.messages, options={"temperature": self.temperature}, stream=True)
+                
+                full_response_content = ""
+                
+                for response_part in response_iter:
+                    if 'message' in response_part and 'content' in response_part['message']:
+                        content = response_part['message']['content']
+                        full_response_content += content
+                        print(content, end='', flush=True)
+                        time.sleep(0.1)                
+                print()
+
+                self.messages.append({'role': 'assistant', 'content': full_response_content})
+            except Exception as e:
+                print(f"\n发生错误: {e}")
+
+# Dependency bootstrap: requires the "ollama" package (pip install ollama).
+# NOTE(review): auto-installing at import time via pip is a side effect —
+# consider failing with an instruction instead, or pinning a version.
+try:
+    from ollama import Client as oClient
+except ImportError:
+    print("ollama 包未安装,正在安装...")
+    subprocess.check_call([sys.executable, "-m", "pip", "install", "ollama"])
+    # Retry the import now that the package is installed.
+    from ollama import Client as oClient
+
+if __name__ == "__main__":
+    # Entry point: connect to a hard-coded Ollama server, then walk the
+    # user through model / temperature / persona setup before chatting.
+    # NOTE(review): the host URL is hard-coded — an env var or CLI argument
+    # would make this reusable against other servers.
+    host = 'http://erhe.top:27381'
+    chat_bot = ChatBot(host)
+    use_model = chat_bot.select_model()
+    chat_bot.set_temperature()
+    chat_bot.set_ai_role()
+    chat_bot.start_chat(use_model)

BIN
ollama.ico