import datetime
import threading

import numpy as np
import pandas as pd
from loguru import logger

from apps.pdd.pdd.sync.sync_pdd_source import SyncPddSource
from components.config import WFQ_SOURCE_MYSQL_CONFIG
from utils.db.mysqldb import MysqlDB


class SyncOrderTask:
    """Synchronise PDD shop crawl tasks between the source MySQL DB and ``remote_pdd_task``.

    Responsibilities (one method per job):
      * mirror the shop-info task table into ``remote_pdd_task`` (``sync_once``),
      * refresh ``last_crawler_time`` / add newly discovered shops for an order version,
      * mark tasks done/failed from crawl results (``sync_clean`` + ``check_is_done_task``).

    DB handles are created lazily via the ``to_db`` / ``local_db`` properties.
    """

    def __init__(self, order_version="2025-01-25"):
        # Lazily-initialised connections (see the properties below).
        self._to_db = None
        self._local_db = None
        self._table_net_pdd_proxy_log = "net_pdd_proxy_log"
        # NOTE(review): this constant holds the *_task table name, identical to the one
        # below — looks like a copy/paste slip ("net_pdd_shop_info" was probably
        # intended), but it is unreferenced in this file, so the value is kept as-is.
        self._table_net_pdd_shop_info = "net_pdd_shop_info_task"
        self._table_net_pdd_shop_info_task = "net_pdd_shop_info_task"
        self._table_source_pdd_shop_info = "source_net_pdd_shop_info"
        self._table_source_pdd_goods_detail = "source_result_pdd_goods_detail"
        self._table_remote_pdd_task = "remote_pdd_task"
        # NOTE(review): the `order_version` argument is immediately overwritten by the
        # newest version found in the task table, so the parameter is effectively
        # ignored — TODO confirm whether the DB lookup should only be a fallback.
        self.order_version = order_version
        self.order_version = self.to_db.find(
            f"SELECT distinct order_version from {self._table_remote_pdd_task} where channel = '' order by order_version desc ", to_json=True
        )[0]['order_version']
        print(self.order_version)

    @property
    def to_db(self):
        """Lazily-created connection to the source MySQL instance."""
        if self._to_db is None:
            self._to_db = MysqlDB(
                ip=WFQ_SOURCE_MYSQL_CONFIG["MYSQL_IP"],
                port=WFQ_SOURCE_MYSQL_CONFIG["MYSQL_PORT"],
                db=WFQ_SOURCE_MYSQL_CONFIG["MYSQL_DB"],
                user_name=WFQ_SOURCE_MYSQL_CONFIG["MYSQL_USER_NAME"],
                user_pass=WFQ_SOURCE_MYSQL_CONFIG["MYSQL_USER_PASS"],
            )
        return self._to_db

    @property
    def local_db(self):
        """Lazily-created connection to the local MySQL instance.

        NOTE(review): currently built from the same config as ``to_db`` — presumably
        both point at one server on purpose; confirm before splitting configs.
        """
        if self._local_db is None:
            self._local_db = MysqlDB(
                ip=WFQ_SOURCE_MYSQL_CONFIG["MYSQL_IP"],
                port=WFQ_SOURCE_MYSQL_CONFIG["MYSQL_PORT"],
                db=WFQ_SOURCE_MYSQL_CONFIG["MYSQL_DB"],
                user_name=WFQ_SOURCE_MYSQL_CONFIG["MYSQL_USER_NAME"],
                user_pass=WFQ_SOURCE_MYSQL_CONFIG["MYSQL_USER_PASS"],
            )
        return self._local_db

    def sync_once(self):
        """Copy every row of the shop-info task table into ``remote_pdd_task``.

        Each row is tagged with the current ``order_version`` and upserted
        column-by-column via ``add_batch_smart``.
        """
        sql = f"""select mall_id, msn, shop_name, out_state, opera_time, owner, out_create_time, last_crawler_time, channel from {self._table_net_pdd_shop_info_task} """
        print(sql)
        datas = self.to_db.find(sql, to_json=True)
        if not datas:
            # Nothing to sync; `rows[0]` below would raise IndexError on an empty result.
            logger.info("sync success")
            return
        df = pd.DataFrame(data=datas)
        df["order_version"] = self.order_version
        # MySQL drivers reject NaN — convert to None so it is written as NULL.
        df = df.replace({np.nan: None})
        rows = df.to_dict("records")
        self.local_db.add_batch_smart(self._table_remote_pdd_task, rows, update_columns=list(rows[0].keys()))
        logger.info("sync success")

    def update_last_crawler_time_order_version_task(self, update_start="2024-12-25", update_end="2025-01-25"):
        """Refresh ``last_crawler_time`` in ``remote_pdd_task`` from the source shop info.

        Keeps only the newest record per ``mall_id``. ``update_start`` is currently
        unused (kept for interface compatibility); only ``update_end`` bounds the query.
        """
        sql = f"""select mall_id, shop_name, update_at as last_crawler_time, '{self.order_version}' as order_version  from {self._table_source_pdd_shop_info} where  (is_strict_all = 1 or crawler_goods_num >= 200) and  update_at <= '{update_end}' and mall_id in (select distinct mall_id from {self._table_remote_pdd_task})
        """
        print(sql)
        res = self.local_db.find(sql, to_json=True)
        if not res:
            # An empty frame has no "last_crawler_time" column; sort_values would raise.
            return
        last_crawler_time_df = pd.DataFrame(data=res)
        # Newest record first, then keep a single row per mall_id.
        last_crawler_time_df = last_crawler_time_df.sort_values(by="last_crawler_time", ascending=False)
        last_crawler_time_df = last_crawler_time_df.drop_duplicates(subset=["mall_id"], keep="first")
        self.local_db.add_batch_smart(
            self._table_remote_pdd_task, last_crawler_time_df.to_dict("records"), update_columns=["last_crawler_time"]
        )

    def add_new_task_to_order_version_task(self, update_start, update_end):
        """Find shops updated in (update_start, update_end] that are not yet tasks.

        Builds the candidate rows (``out_state = 0``) and logs how many would be
        added. NOTE(review): the final insert is deliberately commented out, so this
        method is currently a dry run — confirm before re-enabling.
        """
        sql = f"""select mall_id, msn, shop_name, '{self.order_version}' as order_version,update_at as last_crawler_time from {self._table_source_pdd_shop_info} where update_at > '{update_start}' and update_at <= '{update_end}' and shop_status != '该店铺已失效'"""
        print(sql)
        new_datas = self.local_db.find(sql, to_json=True)
        if not new_datas:
            return
        all_mall_df = pd.DataFrame(data=new_datas)
        all_mall_df = all_mall_df.sort_values(by="last_crawler_time", ascending=False)
        # BUGFIX: the drop_duplicates result was previously discarded, so duplicate
        # mall_ids survived; assign it back (same pattern as the method above).
        all_mall_df = all_mall_df.drop_duplicates(subset=["mall_id"], keep="first")
        all_mall_ids_str = ",".join([f"'{i}'" for i in all_mall_df["mall_id"].unique().tolist()])
        # mall_ids that already have a task for this order_version must be excluded.
        sql_pop = f"""select mall_id from {self._table_remote_pdd_task} where order_version = '{self.order_version}' and mall_id in ({all_mall_ids_str})"""
        pop_mall_ids = [i["mall_id"] for i in self.local_db.find(sql_pop, to_json=True)]
        # .copy() avoids pandas' SettingWithCopy pitfall on the filtered slice below.
        new_datas_df = all_mall_df[~all_mall_df["mall_id"].isin(pop_mall_ids)].copy()
        new_datas_df["out_state"] = 0
        logger.info(f"新增任务：{new_datas_df.shape}")
        new_datas_df = new_datas_df.replace({np.nan: None})
        rows = new_datas_df.to_dict("records")
        # self.local_db.add_batch_smart(self._table_remote_pdd_task, rows, update_columns=list(rows[0].keys()))

    def sync_clean(self, time_range):
        """Re-clean recently crawled shops and push their done/failed state.

        Steps: collect mall_ids seen in the proxy log since ``time_range``, run the
        source cleaner over them, then compute each task's ``out_state`` via
        ``check_is_done_task`` and upsert it — skipping tasks already marked done
        (out_state == 2) and tasks belonging to other order versions.
        """
        clean_mall_ids = self.to_db.find(
            f"select distinct mall_id from {self._table_net_pdd_proxy_log} where create_time > '{time_range}'",
            to_json=True,
        )
        clean_mall_ids_list = [i["mall_id"] for i in clean_mall_ids]
        logger.info(f"开始清洗订单 {len(clean_mall_ids_list)}")
        if not clean_mall_ids_list:
            return
        SyncPddSource().run_by_mall_ids(clean_mall_ids_list)
        clean_mall_ids_list_str = ",".join([f"'{i}'" for i in clean_mall_ids_list])

        # `version` is compared against the date part (first 10 chars) of time_range.
        sql = (
            f"select shop_name, mall_id, is_strict_all, crawler_goods_num, flag, update_at as out_create_time, '{self.order_version}' as order_version, page_goods_count, crawler_goods_num "
            f"from {self._table_source_pdd_shop_info} "
            f"where mall_id in ({clean_mall_ids_list_str}) and version >= '{str(time_range)[:10]}'"
        )
        complete_df = pd.DataFrame(data=self.local_db.find(sql, to_json=True))
        if complete_df.empty:
            logger.info("未查询到日志")
            return

        # Keep only the newest crawl record per mall_id.
        complete_df = complete_df.sort_values(by="out_create_time", ascending=False).drop_duplicates(
            subset=["mall_id"], keep="first"
        )
        src_status_ids_str = ",".join([f"'{i}'" for i in complete_df["mall_id"].unique().tolist()])
        src_status_df = pd.DataFrame(
            self.local_db.find(
                f"select mall_id, out_state from {self._table_remote_pdd_task} where mall_id in ({src_status_ids_str}) and order_version = '{self.order_version}'",
                to_json=True,
            )
        )
        if not src_status_df.empty:
            # out_state == 2 means the task is already done — never downgrade it.
            not_update_mall_df = src_status_df[src_status_df["out_state"] == 2]
        else:
            logger.info("当前版本无数据")
            return

        logger.info(f"已完成订单不更新：{not_update_mall_df.shape[0]} \n {not_update_mall_df.head(5)}")

        # 完成的不修改状态
        update_df = complete_df[~complete_df["mall_id"].isin(not_update_mall_df["mall_id"])]
        update_df = update_df[update_df['mall_id'].isin(src_status_df["mall_id"])]  # 避免更新其他版本的

        update_df_res = update_df.apply(self.check_is_done_task, axis=1).replace({np.nan: None})
        if not update_df_res.empty:
            rows = update_df_res[["mall_id", "order_version", "out_create_time", "out_state", "shop_name", "page_goods_count", "crawler_goods_num"]].to_dict(
                "records"
            )
            logger.info(
                f"更新状态：{update_df_res.shape[0]}\n{update_df_res[['mall_id', 'out_create_time', 'out_state']].head(5)}"
            )
            self.local_db.add_batch_smart(
                self._table_remote_pdd_task, rows, update_columns=["out_state", "out_create_time", "shop_name", "page_goods_count", "crawler_goods_num"]
            )

    def check_is_done_task(self, ser):
        """Set ``ser["out_state"]`` from crawl results and return ``ser``.

        States: 2 = done, 3 = needs re-crawl. Rules:
          * flag == -1  -> 2 (the source marks the shop as terminal);
          * strict-all crawl with a known page_goods_count -> done when crawled
            count matches, or falls short by at most 3%;
          * strict-all crawl with an unknown (NaN) page_goods_count -> 2;
          * everything else -> 3.
        """
        if ser["flag"] == -1:
            ser["out_state"] = 2
        else:
            if ser["is_strict_all"] == 1:
                # x != x is the NaN self-inequality test: this branch runs only when
                # page_goods_count is a real number.
                if not (ser["page_goods_count"] != ser["page_goods_count"]):
                    if ser["crawler_goods_num"] == ser["page_goods_count"]:
                        ser["out_state"] = 2
                    elif ser["crawler_goods_num"] < ser["page_goods_count"]:
                        # Tolerate up to a 3% shortfall before forcing a re-crawl.
                        if (ser['page_goods_count'] - ser["crawler_goods_num"]) / ser["page_goods_count"] <= 0.03:
                            ser["out_state"] = 2
                        else:
                            ser["out_state"] = 3
                    else:
                        # Crawled more than the page count — count is unreliable, retry.
                        ser["out_state"] = 3
                else:
                    # page_goods_count unknown (NaN): accept the crawl as done.
                    ser["out_state"] = 2
            elif ser["is_strict_all"] == 0:
                ser["out_state"] = 3
            else:
                ser["out_state"] = 3
        return ser

    def add_no_mall_id(self):
        """Insert placeholder tasks for shops never seen in the proxy log.

        Selects mall_ids from ``net_pdd_shop_info`` that have no proxy-log entry and
        upserts them with ``out_state = 0`` / owner "未检测", in batches of 1000.
        """
        sql = """
        select mall_id
        from net_pdd_shop_info
        where mall_id not in (select mall_id
        from net_pdd_shop_info
        where mall_id in (select distinct net_pdd_proxy_log.mall_id from net_pdd_proxy_log))
        """
        datas = self.local_db.find(sql, to_json=True)
        if datas:
            rows = [{"mall_id": i["mall_id"], "out_state": 0, "order_version": self.order_version, "shop_name": "", "owner": "未检测"} for i in datas]
            batch_size = 1000
            for i in range(0, len(rows), batch_size):
                self.local_db.add_batch_smart(self._table_remote_pdd_task, rows[i:i + batch_size], update_columns=list(rows[0].keys()))

def clean():
    """Run one order-clean pass covering roughly the last ten minutes."""
    cutoff = datetime.datetime.now() - datetime.timedelta(minutes=10)
    time_range = cutoff.strftime("%Y-%m-%d %H:%M:%S")
    print(f"I'm running on thread {threading.current_thread()} {time_range}")
    SyncOrderTask().sync_clean(time_range=time_range)


def run_threaded(job_func):
    """Start *job_func* on a new thread and return immediately (fire-and-forget)."""
    threading.Thread(target=job_func).start()


if __name__ == "__main__":
    import schedule
    import time

    # Run one clean pass immediately (the previous code duplicated the body of
    # clean() verbatim here — calling it keeps the two code paths identical),
    # then schedule a pass every 3 minutes on a worker thread so a slow pass
    # never blocks the scheduler loop.
    clean()

    schedule.every(3).minutes.do(run_threaded, clean)

    while True:
        schedule.run_pending()
        time.sleep(1)
