# main.py
import os

from dotenv import load_dotenv
from fastapi import FastAPI, HTTPException
from fastapi.middleware.cors import CORSMiddleware
from langchain.chains import ConversationChain
from langchain.memory import ConversationBufferMemory
from langchain.prompts import PromptTemplate
from langchain_community.llms import OpenAI
from pydantic import BaseModel

app = FastAPI(title="DeepSeek-like Chatbot API")

# Allow cross-origin requests (CORS) so a browser front-end on another origin
# can call this API.
# NOTE(review): allow_origins=["*"] together with allow_credentials=True is
# rejected by browsers for credentialed requests under the CORS spec — confirm
# whether credentials are actually needed, or pin explicit origins.
app.add_middleware(
	CORSMiddleware,
	allow_origins=["*"],
	allow_credentials=True,
	allow_methods=["*"],
	allow_headers=["*"],
)

# Load environment variables from a local .env file
# (supplies OPENAI_API_KEY and BASE_URL read below).
load_dotenv()

# Initialize the LLM client against an OpenAI-compatible endpoint.
# NOTE(review): this is the completions-style `OpenAI` wrapper pointed at the
# "qwen-plus" model via BASE_URL — confirm the endpoint accepts completion
# (non-chat) requests for this model.
llm = OpenAI(temperature=0.7, model='qwen-plus',
             api_key=os.getenv("OPENAI_API_KEY"),
             base_url=os.getenv("BASE_URL"),
             )

# In-process conversation history. This single buffer is shared by ALL
# requests to the API — there is no per-user/session isolation here.
memory = ConversationBufferMemory()

# Custom prompt template that styles the assistant after DeepSeek.
# (The template text itself is user-facing runtime content and is kept as-is;
# it instructs the model to answer in Chinese.)
prompt_template = """你是一个名为DeepSeek的AI助手，由深度求索公司开发。
你乐于助人、友好且知识渊博。请用中文回答用户的问题。

当前对话：
{history}
人类：{input}
DeepSeek："""

# The template consumes the two variables ConversationChain supplies:
# the accumulated {history} and the latest user {input}.
PROMPT = PromptTemplate(
	input_variables=["history", "input"], template=prompt_template
)

# Chain wiring: LLM + prompt + shared memory. verbose=True logs each
# fully-rendered prompt to stdout on every call.
conversation = ConversationChain(
	llm=llm,
	prompt=PROMPT,
	memory=memory,
	verbose=True
)


class ChatRequest(BaseModel):
	"""Request body for POST /chat."""

	# The user's chat message to forward to the model.
	message: str


class ChatResponse(BaseModel):
	"""Response body for POST /chat."""

	# The model's generated reply text.
	response: str


@app.post("/chat", response_model=ChatResponse)
def chat_endpoint(request: ChatRequest) -> ChatResponse:
	"""Send one user message through the conversation chain and return the reply.

	Declared as a plain ``def`` (not ``async def``) on purpose:
	``conversation.predict`` is a blocking, synchronous LLM call. Inside an
	``async`` handler it would stall the whole event loop for every concurrent
	request; as a sync handler FastAPI runs it in its threadpool instead.

	Raises:
		HTTPException: 500 if the chain call fails for any reason.
	"""
	try:
		reply = conversation.predict(input=request.message)
	except Exception as e:
		# NOTE(review): str(e) may expose internal details (endpoint URLs,
		# stack context) to clients — consider a generic message in production.
		raise HTTPException(status_code=500, detail=str(e)) from e
	return ChatResponse(response=reply)


@app.get("/")
async def root():
	"""Health-check endpoint: report that the service is alive."""
	status_message = "DeepSeek-like Chatbot API is running!"
	return {"message": status_message}


if __name__ == "__main__":
	import uvicorn

	# Development entry point: serve on all interfaces, port 8000.
	bind_host, bind_port = "0.0.0.0", 8000
	uvicorn.run(app, host=bind_host, port=bind_port)
