import os

import requests
from django.http import StreamingHttpResponse

from utils.customer_util import CustomerUtil


# Create your views here.

def ai_chat(request):
    """Stream a chat completion from the deepseek-v3 model back to the client.

    Reads the user's prompt from the request parameters, forwards it to an
    OpenAI-compatible chat-completions endpoint with ``stream=True``, and
    relays the upstream response line by line as a ``text/event-stream``
    (server-sent events) response.

    Parameters:
        request: the incoming Django HttpRequest; the prompt is taken from
            its parameters via ``CustomerUtil.get_request_params``.

    Returns:
        StreamingHttpResponse relaying the upstream SSE stream.
    """
    params = CustomerUtil.get_request_params(request)
    # The user's prompt text (may be None if the client omitted it).
    question = params.get('prompt')

    # OpenAI-compatible chat-completions endpoint for the deepseek-v3 model.
    url = "https://openai.qiniu.com/v1/chat/completions"
    payload = {
        "stream": True,  # ask the upstream API for a streamed (SSE) response
        "model": "deepseek-v3",
        "messages": [
            {
                "role": "system",
                "content": "You are a helpful assistant"
            },
            {
                "role": "user",
                "content": question  # the user's question
            }
        ]
    }

    # SECURITY: an API key must never be committed to source control. Prefer
    # the AI_API_KEY environment variable; the hard-coded value is kept only
    # as a backward-compatible fallback and should be rotated and removed.
    # TODO(review): delete the fallback once deployment sets AI_API_KEY.
    api_key = os.environ.get(
        "AI_API_KEY",
        "sk-5985e1e1ca23d99902a04801255ce565d316d6f3d3e39b745e192a588d0afcd0",
    )
    headers = {
        # OpenAI-compatible APIs authenticate with "Authorization: Bearer <key>";
        # the raw key alone is rejected with 401 by the OpenAI API spec.
        "Authorization": f"Bearer {api_key}",
        "Content-Type": "application/json",
    }

    # timeout=(connect, read): without it a stalled upstream would hang this
    # worker forever; the long read timeout accommodates slow token streams.
    response = requests.post(
        url, json=payload, headers=headers, stream=True, timeout=(10, 300)
    )

    def generator():
        """Yield upstream SSE lines, releasing the connection when done."""
        try:
            for item in response.iter_lines():
                if item:  # skip keep-alive blank lines
                    line = item.decode('utf-8')
                    yield f'{line}\n\n'
        finally:
            # Close the upstream HTTP connection even if the browser
            # disconnects mid-stream, so the pooled connection is not leaked.
            response.close()

    # Relay the stream to the client as server-sent events.
    return StreamingHttpResponse(generator(), content_type='text/event-stream')
