import datetime
import re

from django.db.models import Q
from django.http import JsonResponse
from django.shortcuts import render
from rest_framework.views import APIView
from .models import Sku, PriceLog, QueryLog
# from .getData.jd_data import jd_main
from .getData.get_data import GetData
from django.core.paginator import Paginator


# Create your views here.

# api/product/get_data
class ProductUrl(APIView):  # Public endpoint: price-history lookup by product URL.
    """POST a product URL and receive that product's crawled price history.

    New products are crawled immediately; known products are re-crawled
    at most once per day and otherwise served from the stored history.
    """

    # Host part of a URL, e.g. "item.jd.com" from "https://item.jd.com/1.html".
    _DOMAIN_RE = re.compile(r'://(.*?)/')
    # Numeric product id in a taobao/tmall query string.
    _ID_RE = re.compile(r'[?&]id=(\d+)')

    @classmethod
    def _normalize_url(cls, product_url):
        """Canonicalize taobao/tmall links down to their id; pass others through.

        Returns None when the URL is malformed (no recognizable host, or a
        taobao/tmall link without an ``id`` query parameter).
        """
        domain_match = cls._DOMAIN_RE.search(product_url)
        if not domain_match:
            # BUGFIX: the original called .group(1) unguarded and raised
            # AttributeError (HTTP 500) on any URL without a "://host/" part.
            return None
        domain = domain_match.group(1)
        if domain in ('item.taobao.com', 'detail.tmall.com'):
            id_match = cls._ID_RE.search(product_url)
            if not id_match:  # BUGFIX: same unguarded .group(1) crash
                return None
            # Strip the tracking junk: keep only the product id.
            return f'https://{domain}/item.htm?id={id_match.group(1)}'
        return product_url

    @staticmethod
    def _save_price_log(sku_id, data):
        # Persist one day's crawl result as a PriceLog row.
        PriceLog.objects.create(sku_id=sku_id,
                                price=data.get("price"),
                                comment_number=data.get("comment_number"),
                                applause_rate=data.get("applause_rate"),
                                title=data.get("title"),
                                store_rating=data.get("store_rating"),
                                sales=data.get("sales"))

    def _create_and_crawl(self, product_url):
        """Handle a URL never seen before: crawl it, create the Sku and
        QueryLog rows, and return the (single-entry) history."""
        try:
            # Run the crawler.
            # data = jd_main(product_url)
            data = GetData().get(product_url)
            # BUGFIX: check the crawler result BEFORE touching the database.
            # The original created Sku/QueryLog rows first, leaving an
            # orphaned product behind whenever the crawl failed.
            if data.get("error"):
                return JsonResponse({"message": "商品不存在"}, json_dumps_params={"ensure_ascii": False})
            # Create the product record.
            product = Sku.objects.create(url=product_url,
                                         title=data.get("title"),
                                         shop_name=data.get("shop_name"),
                                         last_crawled_at=datetime.datetime.now())
            # Record the user's query.
            QueryLog.objects.create(sku_id=product.id)
            # Store the crawled data.
            self._save_price_log(product.id, data)
            data_logs = product.logs.all().values()
            return JsonResponse({"data": list(data_logs), "message": "成功"},
                                json_dumps_params={"ensure_ascii": False})
        except Exception as e:
            print(e)
            return JsonResponse({"message": "数据获取失败", "error": str(e)},
                                json_dumps_params={"ensure_ascii": False})

    def post(self, request):
        """Return the price history for the product at ``request.data["url"]``."""
        product_url = request.data.get("url")
        # Reject an empty URL.
        if not product_url:
            return JsonResponse({"error": "URL不能为空"}, json_dumps_params={"ensure_ascii": False})
        # Canonicalize; reject malformed URLs explicitly instead of crashing.
        product_url = self._normalize_url(product_url)
        if product_url is None:
            return JsonResponse({"error": "URL格式不正确"}, json_dumps_params={"ensure_ascii": False})
        product = Sku.objects.filter(url=product_url).first()
        if product is None:
            # First time we see this URL.
            return self._create_and_crawl(product_url)
        # Record the query, or refresh the latest-query timestamp.
        if QueryLog.objects.filter(sku_id=product.id).exists():
            QueryLog.objects.filter(sku_id=product.id).update(queried_at=datetime.datetime.now())
        else:
            QueryLog.objects.create(sku_id=product.id)
        data_logs = product.logs.all().values()
        # BUGFIX: compare dates properly instead of substring-matching
        # str(datetime); this is also safe when last_crawled_at is NULL.
        crawled_today = (product.last_crawled_at is not None
                         and product.last_crawled_at.date() == datetime.date.today())
        if data_logs and crawled_today:
            # Already crawled today: serve the stored history directly.
            return JsonResponse({"data": list(data_logs), "message": "成功"},
                                json_dumps_params={"ensure_ascii": False})
        # No data yet, or not crawled today: crawl now and store the result.
        # data = jd_main(product_url)
        data = GetData().get(product_url)
        # BUGFIX: the original stored the result unconditionally; a failed
        # crawl wrote a PriceLog row full of NULLs. Skip the save and fall
        # back to the stored history instead.
        if not data.get("error"):
            self._save_price_log(product.id, data)
            # Refresh the product's latest title/shop and crawl timestamp.
            Sku.objects.filter(id=product.id).update(title=data.get("title"),
                                                     shop_name=data.get("shop_name"),
                                                     last_crawled_at=datetime.datetime.now())
            data_logs = product.logs.all().values()
        return JsonResponse({"data": list(data_logs), "message": "成功"},
                            json_dumps_params={"ensure_ascii": False})

# api/product/query_log/
class QueryLogView(APIView):  # Admin-only endpoints over the user query log.

    def get(self, request):
        """Paginated list of the last 7 days of user query records.

        Query params: ``page``, ``page_size``, optional ``keyword`` (matched
        against sku title or query date) and ``filter_uncrawled`` (keep only
        records whose sku has not been crawled today). Each record is
        annotated with its sku's url/title/shop_name and a ``today_crawled``
        0/1 flag.
        """
        try:
            result = []  # rows returned to the front end
            # ROBUSTNESS: default page/page_size instead of failing with
            # int(None) when the parameters are absent.
            page = request.GET.get("page") or 1
            size = request.GET.get("page_size") or 10
            filter_uncrawled = request.GET.get("filter_uncrawled")
            keyword = request.GET.get("keyword")
            today = datetime.date.today()
            # Keep only records from the last 7 days.
            seven_days_ago = today - datetime.timedelta(days=7)
            query_logs = QueryLog.objects.filter(queried_at__gte=seven_days_ago)
            if filter_uncrawled:
                # Keep only records whose sku has NOT been crawled today.
                query_logs = query_logs.exclude(sku__last_crawled_at__date=today)
            if keyword:
                query_logs = query_logs.filter(
                    Q(sku__title__icontains=keyword) |
                    Q(queried_at__date__icontains=keyword)
                )
            # Total row count, taken BEFORE pagination.
            length = query_logs.count()
            paginator = Paginator(query_logs, size)
            query_logs = list(paginator.get_page(page).object_list.values())
            # PERF: one bulk query instead of one Sku lookup per record
            # (the original did an N+1 Sku.objects.get inside the loop).
            skus = Sku.objects.in_bulk([log["sku_id"] for log in query_logs])
        except Exception as e:
            return JsonResponse({"message": "数据获取失败", "error": str(e)},
                                json_dumps_params={"ensure_ascii": False})
        # Annotate each record with sku details and today's crawl status.
        for log in query_logs:
            sku = skus.get(log["sku_id"])
            if sku is None:
                # Sku row is gone; keep the bare record (same shape as the
                # original DoesNotExist branch: no url/title keys).
                log["today_crawled"] = 0
            else:
                log["url"] = sku.url
                log["title"] = sku.title
                log["shop_name"] = sku.shop_name
                crawled_today = sku.last_crawled_at and sku.last_crawled_at.date() == today
                log["today_crawled"] = 1 if crawled_today else 0  # 1 = crawled today
            result.append(log)
        return JsonResponse({"data": result, "totalLength": length}, json_dumps_params={"ensure_ascii": False})

    def post(self, request):
        """Force a crawl of one sku (at most once per day) and store the result."""
        sku_id = request.data.get("sku_id")
        try:
            product = Sku.objects.get(id=sku_id)
            # BUGFIX: compare dates properly instead of substring-matching
            # str(datetime); also NULL-safe for never-crawled skus.
            if product.last_crawled_at and product.last_crawled_at.date() == datetime.date.today():
                return JsonResponse({"message": "今天已经爬取过了"}, json_dumps_params={"ensure_ascii": False})
            # Run the crawler.
            # data = jd_main(url=product.url)
            data = GetData().get(product.url)
            if data.get("error"):
                return JsonResponse({"message": "执行出错，商品可能不存在"}, json_dumps_params={"ensure_ascii": False})
            # Store today's crawl result.
            PriceLog.objects.create(
                sku_id=product.id,
                price=data.get("price"),
                comment_number=data.get("comment_number"),
                applause_rate=data.get("applause_rate"),
                title=data.get("title"),
                store_rating=data.get("store_rating"),
                sales=data.get("sales")
            )
            # Refresh the sku's latest title/shop and crawl timestamp.
            Sku.objects.filter(id=product.id).update(title=data.get("title"),
                                                     shop_name=data.get("shop_name"),
                                                     last_crawled_at=datetime.datetime.now())
            return JsonResponse({"message": "成功"}, json_dumps_params={"ensure_ascii": False})
        except Exception as e:
            return JsonResponse({"message": f"执行出错，商品不存在，错误信息为：{str(e)}"},
                                json_dumps_params={"ensure_ascii": False})

    def delete(self, request, id):
        """Delete every query-log row belonging to the given sku id."""
        QueryLog.objects.filter(sku_id=id).delete()
        return JsonResponse({"message": "删除成功"}, json_dumps_params={"ensure_ascii": False})
