import os

from dashscope import Generation
from openai import OpenAI
from rest_framework.response import Response
from rest_framework.views import APIView
from django.contrib.auth.models import User
from django.http import StreamingHttpResponse
import uuid
import json
from .redis_service import ChatRedisService
from .models import ChatSession


class MutChatAPIView(APIView):
    """Streaming chat endpoint backed by DashScope's OpenAI-compatible API.

    POST body: {"question": "<user question>"}
    Response: Server-Sent Events stream (``text/event-stream``) where each
    event carries one incremental model-output chunk as ``data: <text>``.
    """

    def post(self, request):
        """Validate the question, call the model with streaming enabled,
        and relay the token deltas to the client as SSE events.

        Returns 400 when ``question`` is missing; otherwise a
        ``StreamingHttpResponse`` that yields chunks as they arrive.
        """
        question = request.data.get('question')
        if not question:
            # Reject empty input with an explicit client-error status
            # (previously this returned HTTP 200, which hid the failure).
            return Response({"error": "请输入问题"}, status=400)

        # OpenAI-compatible client pointed at DashScope. If the environment
        # variable is not configured, replace with api_key="sk-xxx".
        client = OpenAI(
            api_key=os.getenv("DASHSCOPE_API_KEY"),
            base_url="https://dashscope.aliyuncs.com/compatible-mode/v1",
        )

        # qwen-plus used as an example; swap the model name as needed.
        # Model list: https://help.aliyun.com/zh/model-studio/getting-started/models
        completion = client.chat.completions.create(
            model="qwen-plus",
            messages=[
                {'role': 'system', 'content': '你是一个智能助手'},
                {'role': 'user', 'content': question},
            ],
            stream=True,
            # With include_usage, the final chunk reports token usage and
            # has an empty `choices` list — the guard below skips it.
            stream_options={"include_usage": True},
        )

        def generate():
            """Yield SSE-formatted data lines for each non-empty delta."""
            for chunk in completion:
                # Skip usage-only chunks (empty/missing `choices`).
                if hasattr(chunk, 'choices') and chunk.choices:
                    # Some deltas (e.g. role-only first chunk) may lack
                    # `content`; getattr avoids an AttributeError there.
                    content = getattr(chunk.choices[0].delta, 'content', None)
                    if content:
                        yield f"data: {content}\n\n"

        return StreamingHttpResponse(generate(), content_type='text/event-stream')



