from fastapi import FastAPI, Request, Response
from fastapi.responses import StreamingResponse
from fastapi.templating import Jinja2Templates
from fastapi.staticfiles import StaticFiles
import requests
import json
from collections import deque
import os
# Run relative to the project root so the static/ and templates/ directories
# resolve. NOTE(review): hard-coded absolute path — breaks on any other
# machine; consider deriving it from __file__ instead.
os.chdir('/home/wjb711/ai')

# Initialise the FastAPI application.
app = FastAPI()

# Serve static assets (CSS / JavaScript) under /static.
app.mount("/static", StaticFiles(directory="static"), name="static")

# Jinja2 template environment for the HTML pages.
templates = Jinja2Templates(directory="templates")

# Upstream chat-completions endpoint (OpenAI-compatible; Ollama by default).
# Overridable via the environment so deployments don't require a code edit;
# the fallback preserves the original hard-coded value.
MODEL_API_URL = os.environ.get(
    "MODEL_API_URL", "http://10.102.10.197:11434/v1/chat/completions"
)

# Bearer token for the upstream service. SECURITY: the fallback keeps the old
# behaviour, but a real secret should live only in the environment, never in
# source control.
API_KEY = os.environ.get(
    "MODEL_API_KEY", "sk-pxcyyzfrmpfdwstqeehizrqgobwikmsbonaeworhnqvvnqro"
)

# Landing page: render the chat UI.
@app.get("/")
async def read_root(request: Request):
    """Serve the index.html template for the chat front-end."""
    context = {"request": request}
    return templates.TemplateResponse("index.html", context)

# Form-submission route: forwards the user's message to the model API and
# streams the reply back to the browser as server-sent events.
@app.post("/")
async def process_form(request: Request):
    """Read `user_input` from the posted form and stream the model's answer.

    Returns a StreamingResponse whose body is the generator produced by
    call_model_api_stream, emitted as text/event-stream so the client can
    render tokens incrementally.
    """
    form_data = await request.form()
    # Coalesce a missing field to "" so None is never passed downstream
    # as the prompt (the original forwarded None unchecked).
    user_input = form_data.get("user_input") or ""
    print('***', form_data, user_input)

    # Lazy generator: fragments are produced only as the upstream responds.
    response_generator = call_model_api_stream(user_input)

    # text/event-stream lets the browser consume chunks as they arrive.
    return StreamingResponse(response_generator, media_type="text/event-stream")

# Rolling conversation memory: the most recent 10 messages (5 user/assistant
# exchanges now that both sides are recorded). NOTE(review): this deque is
# process-global, so all concurrent users share one conversation history.
history = deque(maxlen=10)


def call_model_api_stream(prompt: str):
    """Stream a chat completion for *prompt* from the upstream model API.

    Yields HTML-escaped text fragments (newline -> <br>, space -> &nbsp;)
    as they arrive over the SSE stream. On a transport failure a single
    SSE-formatted error string is yielded instead of raising.
    """
    # Build the message list in chronological order: system prompt, prior
    # history, then the current user turn. (The original appended history
    # AFTER the new prompt, presenting turns to the model out of order.)
    messages = [
        {"role": "system", "content": "你是xx中国南昌生产区的智能AI机器人"}
    ]
    messages.extend(history)
    messages.append({"role": "user", "content": prompt})
    print('messages', messages)

    payload = {
        "model": "deepseek-v2:16b",
        "messages": messages,
        "stream": True,  # enable server-sent-events streaming
        "max_tokens": 512,
        "temperature": 0.2,
        "top_p": 0.7,
        "top_k": 50,
        "frequency_penalty": 0.5,
        "n": 1,
        "response_format": {"type": "text"},
        # NOTE(review): the original sent "num thread": 4, which is not a
        # valid OpenAI-compatible field (Ollama expects "num_thread" inside
        # an "options" object on its native API) and was silently ignored,
        # so it is dropped here.
    }

    headers = {
        "Authorization": f"Bearer {API_KEY}",
        "Content-Type": "application/json"
    }

    assistant_parts = []  # accumulated reply, stored in history when done
    try:
        # timeout=(connect, read): don't hang forever if the model host is
        # down; the long read timeout accommodates slow token generation.
        with requests.post(MODEL_API_URL, json=payload, headers=headers,
                           stream=True, timeout=(5, 300)) as response:
            response.raise_for_status()  # surface HTTP errors instead of parsing junk
            history.append({"role": "user", "content": prompt})
            for chunk in response.iter_lines():
                line = chunk.decode('utf-8')
                # SSE data lines look like "data: {...}" or "data: [DONE]";
                # skip blank keep-alive lines and anything else.
                if not line.startswith('data:'):
                    continue
                data = line[5:].strip()
                if data == '[DONE]':
                    break
                try:
                    event = json.loads(data)
                    text = event['choices'][0]['delta'].get('content')
                except (json.JSONDecodeError, KeyError, IndexError):
                    # Malformed or non-content event; the original silently
                    # ignored these with a bare except — keep that intent
                    # but only for the expected failure modes.
                    continue
                if not text:
                    continue  # role-only / empty delta chunks carry no text
                assistant_parts.append(text)
                # Escape for direct insertion into the HTML page.
                yield text.replace('\n', '<br>').replace(' ', '&nbsp;')
        # Record the assistant's full reply so follow-up turns have context
        # (the original only ever stored the user side of the conversation).
        if assistant_parts:
            history.append(
                {"role": "assistant", "content": ''.join(assistant_parts)}
            )
    except requests.exceptions.RequestException as e:
        print("请求失败：", e)
        yield f"data: 请求失败: {str(e)}\n\n"
# Entry point: run the ASGI app with uvicorn when executed directly
# (binds to localhost only; put a reverse proxy in front for external access).
if __name__ == "__main__":
    import uvicorn
    
    uvicorn.run(app, host="127.0.0.1", port=8000)
