import os
from typing import Optional

from fastapi import FastAPI, Query
from openai import OpenAI
def call_kimi(q):
    """Send the user query *q* to the Moonshot Kimi chat API and return the reply.

    The response is requested as a stream; the streamed delta fragments are
    concatenated and returned as one string.

    :param q: user query text forwarded as the ``user`` message.
    :return: the assistant's complete reply text (may be empty).
    :raises openai.OpenAIError: on API/network failures (propagated from the SDK).
    """
    client = OpenAI(
        # SECURITY: the API key was hard-coded here; it is now read from the
        # environment.  The embedded literal is kept only as a backward-compatible
        # fallback — it is exposed in source control and should be rotated.
        api_key=os.environ.get(
            "MOONSHOT_API_KEY",
            "sk-SEg8Yh9nQQCS41jcuZZX9SaA5Nqdxfeb7AkQwUnN0TFjdNS2",
        ),
        base_url="https://api.moonshot.cn/v1",
    )
    response = client.chat.completions.create(
        model="moonshot-v1-128k",
        messages=[
            {
                "role": "system",
                "content": "你是 Kimi,由 Moonshot AI 提供的人工智能助手,你更擅长中文和英文的对话。你会为用户提供安全,有帮助,准确的回答。同时,你会拒绝一切涉及恐怖主义,种族歧视,黄色暴力等问题的回答。Moonshot AI 为专有名词,不可翻译成其他语言。",
            },
            {"role": "user", "content": q},
        ],
        temperature=0.3,
        stream=True,
    )
    # Accumulate only the text content of each streamed delta; chunks whose
    # delta has no content (e.g. role-only or final chunks) are skipped.
    parts = []
    for chunk in response:
        delta = chunk.choices[0].delta
        if delta.content:
            parts.append(delta.content)
    return "".join(parts)
# Single-endpoint FastAPI application that proxies queries to Kimi.
app = FastAPI()


@app.get("/")
async def read_items(q: Optional[str] = Query(None, description="Query string")):
    """Handle GET / by forwarding the optional query string ``q`` to Kimi.

    :param q: free-text question; when absent or empty, no API call is made.
    :return: ``{"message": <reply>}`` — an empty message if ``q`` was not given.
    """
    # Fixed annotation: the parameter defaults to None, so it is Optional[str],
    # not str (Optional was imported but previously unused).
    result_message = ""
    if q:
        result_message = call_kimi(q)
    return {"message": result_message}
# Development entry point: start a local uvicorn server when the module is
# executed directly (not when imported by an ASGI server).
if __name__ == "__main__":
    import uvicorn
    # Binds on all interfaces, port 8000 — intended for local/dev use.
    uvicorn.run(app, host="0.0.0.0", port=8000)