# main.py — FastAPI wrapper around the Moonshot (Kimi) chat completion API.
import os
from typing import Optional

from fastapi import FastAPI, Query
from openai import OpenAI
  4. def call_kimi(q):
  5. client = OpenAI(
  6. api_key="sk-SEg8Yh9nQQCS41jcuZZX9SaA5Nqdxfeb7AkQwUnN0TFjdNS2",
  7. base_url="https://api.moonshot.cn/v1",
  8. )
  9. response = client.chat.completions.create(
  10. model="moonshot-v1-128k",
  11. messages=[
  12. {
  13. "role": "system",
  14. "content": "你是 Kimi,由 Moonshot AI 提供的人工智能助手,你更擅长中文和英文的对话。你会为用户提供安全,有帮助,准确的回答。同时,你会拒绝一切涉及恐怖主义,种族歧视,黄色暴力等问题的回答。Moonshot AI 为专有名词,不可翻译成其他语言。",
  15. },
  16. {"role": "user", "content": f"{q}"},
  17. ],
  18. temperature=0.3,
  19. stream=True,
  20. )
  21. collected_messages = []
  22. for idx, chunk in enumerate(response):
  23. # print("Chunk received, value: ", chunk)
  24. chunk_message = chunk.choices[0].delta
  25. if not chunk_message.content:
  26. continue
  27. collected_messages.append(chunk_message) # save the message
  28. # print(f"#{idx}: {''.join([m.content for m in collected_messages])}")
  29. result = ''.join([m.content for m in collected_messages])
  30. # print(''.join([m.content for m in collected_messages]))
  31. return result
  32. app = FastAPI()
  33. @app.get("/")
  34. async def read_items(q: str = Query(None, description="Query string")):
  35. result_message = ''
  36. if q:
  37. result_message = call_kimi(q)
  38. return {"message": result_message}
  39. if __name__ == "__main__":
  40. import uvicorn
  41. uvicorn.run(app, host="0.0.0.0", port=8000)