import json

from django.http import JsonResponse, StreamingHttpResponse
from django.shortcuts import render
from rest_framework.views import APIView
from user.models import SysUser
from .models import Conversation
from .coze_test.conversation import CozeConversation
from .drission_page.scrape_data_jd import jd_main


# Create your views here.
# api/recoBot/conversation/<str:conversation_id>
# api/recoBot/conversation/
class ConversationView(APIView):
    """Conversation CRUD backed by the Coze chat service.

    Routes:
        GET    api/recoBot/conversation/                    list a user's conversations
        POST   api/recoBot/conversation/                    create a conversation, stream first answer (SSE)
        DELETE api/recoBot/conversation/<conversation_id>   delete a conversation
        PUT    api/recoBot/conversation/<conversation_id>   rename a conversation
    """

    def get(self, request):
        """List all conversations of the user given by the ``user_id`` query param."""
        coze = CozeConversation()
        user_id = request.query_params.get("user_id")
        try:
            user = SysUser.objects.get(id=user_id)  # verify the user exists
        except (SysUser.DoesNotExist, ValueError, TypeError):
            # DoesNotExist: unknown id; ValueError/TypeError: missing or malformed id.
            return JsonResponse({"message": "用户不存在"})
        try:
            # NOTE(review): the return value of this remote call was never used in the
            # original code; kept in case it has a side effect on the Coze side — TODO confirm.
            coze.find_all_conversation()
            # Conversations are read from the local DB, not from the Coze response.
            all_conversation = list(
                user.conversations.all().values("user_id", "conversation_id", "name"))
            return JsonResponse({"data": all_conversation})
        except Exception as e:
            # Don't mislabel a service/DB failure as "user not found".
            return JsonResponse({"message": "获取会话失败", "error": str(e)})

    def post(self, request):
        """Create a new conversation from the first question and stream the answer as SSE."""
        coze = CozeConversation()
        user_id = request.data.get("user_id")
        question = request.data.get("question", '')
        try:
            SysUser.objects.get(id=user_id)  # verify the user exists
            if not question:  # rejects '', missing key, and an explicit JSON null
                return JsonResponse({"message": "请输入问题"})
            conversation_info = coze.create_conversation(question)  # create on the Coze side
            conversation_id = conversation_info.get("data").get("id")
            # Persist locally; the first question doubles as the conversation name.
            Conversation.objects.create(user_id=user_id, conversation_id=conversation_id, name=question)
            answer = coze.have_conversation(user_id, conversation_id, question)  # returns a generator factory
            response = StreamingHttpResponse(
                streaming_content=answer(),
                content_type="text/event-stream",
                status=200,
            )
            # Without no-cache the browser may replay a previously cached stream.
            response['Cache-Control'] = 'no-cache'
            # Disable Nginx response buffering so SSE chunks flush immediately.
            response['X-Accel-Buffering'] = 'no'
            return response
        except Exception as e:
            # Boundary handler: surface the failure to the client instead of a 500.
            return JsonResponse({"message": "消息发送失败", "error": str(e)})

    def delete(self, request, conversation_id):
        """Delete a conversation both on the Coze side and in the local DB."""
        coze = CozeConversation()
        user_id = request.query_params.get("user_id")
        try:
            user = SysUser.objects.get(id=user_id)  # verify the user exists
            result = coze.delete_conversation(conversation_id)  # remove on the Coze side first
            user.conversations.get(conversation_id=conversation_id).delete()  # then locally
            return JsonResponse({"data": result})
        except Exception as e:
            return JsonResponse({"message": "删除失败", "error": str(e)})

    def put(self, request, conversation_id):
        """Rename a conversation both on the Coze side and in the local DB."""
        coze = CozeConversation()
        user_id = request.data.get("user_id")
        conversation_name = request.data.get("conversation_name", '')
        try:
            user = SysUser.objects.get(id=user_id)  # verify the user exists
            result = coze.update_conversation(conversation_id, conversation_name)  # rename on the Coze side
            conversation = user.conversations.get(conversation_id=conversation_id)
            conversation.name = conversation_name
            conversation.save()
            return JsonResponse({"data": result})
        except Exception as e:
            return JsonResponse({"message": "更新失败", "error": str(e)})


# api/recoBot/dialogue/
class DialogueView(APIView):
    """Dialogue endpoints: fetch message history and continue a conversation.

    Routes:
        GET  api/recoBot/dialogue/   message history of a conversation, paired Q/A
        POST api/recoBot/dialogue/   send a follow-up question, stream the answer (SSE)
    """

    def get(self, request):
        """Return the conversation's history as a list of {user, assistant} pairs."""
        coze = CozeConversation()
        conversation_id = request.query_params.get("conversation_id")
        messages = coze.find_message_list(conversation_id)
        dialogue = []  # accumulated (question, answer) pairs
        question_answer = {}  # the pair currently being assembled
        # NOTE(review): pairing assumes each assistant message appears before its
        # user message in the list (Coze ordering) — TODO confirm against the API.
        for message in messages.get("data") or []:  # `or []` guards a null/absent data field
            if message.get("role") == "assistant":
                question_answer["assistant"] = {"content": message.get("content"),
                                                "message_id": message.get("id")}
            elif message.get("role") == "user":
                question_answer["user"] = {"content": message.get("content"),
                                           "message_id": message.get("id")}
                dialogue.append(question_answer)  # a user message completes the pair
                question_answer = {}
        return JsonResponse({"data": dialogue})

    def post(self, request):
        """Send a question inside an existing conversation and stream the answer as SSE."""
        coze = CozeConversation()
        user_id = request.data.get("user_id")
        conversation_id = request.data.get("conversation_id")
        question = request.data.get("question", '')
        try:
            SysUser.objects.get(id=user_id)  # verify the user exists
            if not conversation_id:  # also catches a missing key (None), not just ''
                return JsonResponse({"message": "会话id不正确"})
            if not question:  # consistent with ConversationView.post
                return JsonResponse({"message": "请输入问题"})
            answer = coze.have_conversation(user_id, conversation_id, question)  # generator factory
            response = StreamingHttpResponse(
                streaming_content=answer(),
                content_type="text/event-stream",
                status=200,
            )
            # Without no-cache the browser may replay a previously cached stream.
            response['Cache-Control'] = 'no-cache'
            # Disable Nginx response buffering so SSE chunks flush immediately.
            response['X-Accel-Buffering'] = 'no'
            return response
        except Exception as e:
            return JsonResponse({"message": "消息发送失败", "error": str(e)})


# api/recoBot/get_data/
class GetData(APIView):
    """Scrape product data (JD) and feed it to the bot for a streamed recommendation.

    Routes:
        GET  api/recoBot/get_data/   health-check / placeholder endpoint
        POST api/recoBot/get_data/   scrape by product/price/brand, stream bot answer (SSE)
    """

    def get(self, request):
        """Placeholder endpoint; returns a constant payload."""
        return JsonResponse({"data": "123"})

    def post(self, request):
        """Scrape JD for the requested product and stream the bot's recommendation.

        Expected body fields: user_id, conversation_id, product, min_price
        (optional), max_price, brand.
        """
        coze = CozeConversation()
        user_id = request.data.get("user_id")
        conversation_id = request.data.get("conversation_id")
        product = request.data.get("product")
        min_price = request.data.get("min_price", '')
        max_price = request.data.get("max_price")  # NOTE(review): may be None if absent — confirm jd_main handles it
        brand = request.data.get("brand")
        try:
            if min_price:
                jd_product_info = jd_main(product=product, min_price=min_price, max_price=max_price, brand=brand)
            else:
                # No lower bound supplied: let jd_main use its own default min price.
                jd_product_info = jd_main(product=product, max_price=max_price, brand=brand)
            all_product = {"jd_product": jd_product_info}
            # clear_the_message=True: start from the scraped data, not prior chat context.
            answer = coze.have_conversation(user_id, conversation_id, all_product, clear_the_message=True)
            response = StreamingHttpResponse(
                streaming_content=answer(),
                content_type="text/event-stream",
                status=200,
            )
            # Without no-cache the browser may replay a previously cached stream.
            response['Cache-Control'] = 'no-cache'
            # Disable Nginx response buffering so SSE chunks flush immediately.
            response['X-Accel-Buffering'] = 'no'
            return response
        except Exception as e:
            # Boundary handler, consistent with the other POST views: report instead of 500.
            return JsonResponse({"message": "消息发送失败", "error": str(e)})


# from openai import OpenAI
#
#
# # def test(request):
# def test(request):
#     def get_response(messages):
#         client = OpenAI(
#             # 若没有配置环境变量，请用阿里云百炼API Key将下行替换为：api_key="sk-xxx",
#             api_key=os.environ["DASHSCOPE_API_KEY"],  # NOTE(review): a live API key was hard-coded here — revoke it and load secrets from the environment
#             base_url="https://dashscope.aliyuncs.com/compatible-mode/v1",
#         )
#         # 模型列表：https://help.aliyun.com/zh/model-studio/getting-started/models
#         completion = client.chat.completions.create(model="qwen-plus", messages=messages)
#         return completion
#
#     def generate_responses():
#         print("欢迎来到阿里云百炼手机商店！")
#         # 初始化一个 messages 数组
#         messages = [
#             {
#                 "role": "system",
#                 "content": """你是一名阿里云百炼手机商店的店员，你负责给用户推荐手机。手机有两个参数：屏幕尺寸（包括6.1英寸、6.5英寸、6.7英寸）、分辨率（包括2K、4K）。
#                     你一次只能向用户提问一个参数。如果用户提供的信息不全，你需要反问他，让他提供没有提供的参数。如果参数收集完成，你要说：我已了解您的购买意向，请稍等。""",
#             }
#         ]
#         assistant_output = "欢迎光临阿里云百炼手机商店，您需要购买什么尺寸的手机呢？"
#         # 使用 yield 返回第一个输出
#         yield f"data: {assistant_output}\n\n"
#         print(f"模型输出：{assistant_output}\n")
#
#         while "我已了解您的购买意向" not in assistant_output:
#             # 获取用户输入（实际应用中应从请求中获取）
#             # 这里为了演示使用固定输入，实际应用中需要从request获取
#             user_input = input()  # 示例获取方式
#
#             # 将用户问题信息添加到messages列表中
#             messages.append({"role": "user", "content": user_input})
#             assistant_output = get_response(messages).choices[0].message.content
#             # 将大模型的回复信息添加到messages列表中
#             messages.append({"role": "assistant", "content": assistant_output})
#
#             # 使用 yield 返回每次循环的结果
#             yield f"data: {assistant_output}\n\n"
#             print(f"模型输出：{assistant_output}")
#             print("\n")
#
#     # 返回流式响应
#     response = StreamingHttpResponse(
#         streaming_content=generate_responses(),
#         content_type="text/event-stream",
#         status=200,
#     )
#     response['Cache-Control'] = 'no-cache'
#     response['X-Accel-Buffering'] = 'no'
#     return response
