#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time    : 2024/10/24 15:13
# @Author  : 王凯
# @File    : fix_task.py
# @Project : scrapy_spider
import datetime
import json
import re
import concurrent.futures
import parsel
from loguru import logger
import pandas as pd
from components.config import WFQ_SOURCE_MYSQL_CONFIG
from utils.db.mysqldb import MysqlDB


class CleanToTask:
    """One-off maintenance job that repairs crawler bookkeeping columns on
    ``net_pdd_shop_info`` using evidence from ``net_pdd_proxy_log``.

    Two entry points:

    * :meth:`run` -- for malls whose proxy log reports a finished crawl
      (``has_more == 'false'``), backfill ``last_crawler_time`` where it is
      still NULL.
    * :meth:`run_fix` -- for malls marked as crawled but with no proxy-log
      rows at all, clear ``last_crawler_time``/``flag`` so they get
      re-queued.

    The MySQL connection is opened lazily on first access of :attr:`to_db`,
    so merely constructing this class performs no I/O.
    """

    def __init__(self):
        # Lazily-created MysqlDB handle; populated by the ``to_db`` property.
        self._to_db = None

    @property
    def to_db(self):
        """Connect to the source MySQL database on first use and cache the handle."""
        if self._to_db is None:
            self._to_db = MysqlDB(
                ip=WFQ_SOURCE_MYSQL_CONFIG["MYSQL_IP"],
                port=WFQ_SOURCE_MYSQL_CONFIG["MYSQL_PORT"],
                db=WFQ_SOURCE_MYSQL_CONFIG["MYSQL_DB"],
                user_name=WFQ_SOURCE_MYSQL_CONFIG["MYSQL_USER_NAME"],
                user_pass=WFQ_SOURCE_MYSQL_CONFIG["MYSQL_USER_PASS"],
            )
        return self._to_db

    def run(self):
        """Backfill ``last_crawler_time`` for malls whose crawl finished.

        Selects every mall whose proxy log (created after 2024-10-01)
        reports ``has_more == 'false'`` and fans the updates out over a
        thread pool in batches of 100.
        """
        sql = "select mall_id from net_pdd_proxy_log where create_time  > '2024-10-01' and JSON_UNQUOTE(JSON_EXTRACT(response, '$.has_more')) = 'false' group by mall_id"
        # to_json=True so each row is a dict, which run_by_ids expects
        # (i["mall_id"]); consistent with run_fix. The original call
        # omitted it.
        mall_ids = self.to_db.find(sql, to_json=True)
        logger.info("malls with finished crawls: {}", len(mall_ids))
        batch_size = 100
        # Purely I/O-bound MySQL round trips, so threads are appropriate.
        with concurrent.futures.ThreadPoolExecutor(max_workers=10) as executor:
            for start in range(0, len(mall_ids), batch_size):
                executor.submit(self.run_by_ids, mall_ids[start:start + batch_size])

    @logger.catch
    def run_by_ids(self, mall_ids):
        """Set ``last_crawler_time`` for one batch of malls where it is NULL.

        ``@logger.catch`` is required: exceptions inside callables passed to
        ``executor.submit`` are silently dropped when nobody retrieves the
        future's result. run_fix_by_ids already carried the decorator; this
        makes both workers consistent.

        :param mall_ids: list of row dicts, each with a ``mall_id`` key.
        """
        # Values come from our own database (not user input), so inlining
        # them into the IN (...) clause is acceptable here.
        mall_ids_str = ",".join([f'"{row["mall_id"]}"' for row in mall_ids])
        res = self.to_db.update_smart(
            "net_pdd_shop_info",
            {"last_crawler_time": "2024-10-01 00:00:00"},
            f"mall_id in ({mall_ids_str}) and last_crawler_time is null",
        )
        logger.info("updated rows: {}", res)

    def run_fix(self):
        """Re-queue malls marked as crawled but absent from the proxy log."""
        sql = "select mall_id from net_pdd_shop_info where last_crawler_time is not null"
        datas = self.to_db.find(sql, to_json=True)
        batch_size = 100
        with concurrent.futures.ThreadPoolExecutor(max_workers=10) as executor:
            for start in range(0, len(datas), batch_size):
                executor.submit(self.run_fix_by_ids, datas[start:start + batch_size])

    @logger.catch
    def run_fix_by_ids(self, datas):
        """Clear crawl markers for batch malls that have no proxy-log rows.

        :param datas: list of row dicts, each with a ``mall_id`` key.
        """
        all_mall_ids = {row["mall_id"] for row in datas}
        mall_ids_str = ",".join([f'"{mall_id}"' for mall_id in all_mall_ids])
        sql = f"select mall_id from net_pdd_proxy_log where mall_id in ({mall_ids_str}) group by mall_id"
        logged_mall_ids = {row["mall_id"] for row in self.to_db.find(sql, to_json=True)}

        # Malls claiming to be crawled but with zero proxy-log evidence.
        not_mall_ids = all_mall_ids - logged_mall_ids
        if not_mall_ids:
            logger.info("malls missing from proxy log: {}", not_mall_ids)
            tmp_str = ",".join([f'"{i}"' for i in not_mall_ids])
            # Nulling last_crawler_time/flag puts them back in the crawl queue.
            self.to_db.update_smart(
                "net_pdd_shop_info",
                {"last_crawler_time": None, "flag": None},
                f"mall_id in ({tmp_str})",
            )


if __name__ == '__main__':
    # Entry point: only the re-queue pass is run by default.
    task = CleanToTask()
    task.run_fix()
