import datetime
import threading

from dateutil.relativedelta import relativedelta
from loguru import logger

from apps.pdd.pdd.clean.main_run import PDDCleanRun
from components.config import WFQ_SOURCE_MYSQL_CONFIG
from utils.db.mysqldb import MysqlDB
from utils.db.redisdb import RedisDB


class SLWDTask:
    """Maintenance tasks for the PDD shop-info crawl queue.

    Responsibilities:
      * mirror recently crawled rows from ``net_pdd_shop_info`` into the
        task table ``net_pdd_shop_info_task`` (``sync_datas``);
      * every day, enqueue shops whose last crawl was one month ago into a
        Redis sorted set for re-checking (``enqueue_last_month_today``);
      * drop queue entries that have been pending or claimed for too long.

    MySQL and Redis connections are created lazily and cached on first use.
    """

    def __init__(self):
        # Lazy connection caches; populated by the properties below.
        self._to_db = None
        self._redis_db = None

    @property
    def to_db(self):
        """MySQL connection to the WFQ source database (created on first access)."""
        if self._to_db is None:
            self._to_db = MysqlDB(
                ip=WFQ_SOURCE_MYSQL_CONFIG["MYSQL_IP"],
                port=WFQ_SOURCE_MYSQL_CONFIG["MYSQL_PORT"],
                db=WFQ_SOURCE_MYSQL_CONFIG["MYSQL_DB"],
                user_name=WFQ_SOURCE_MYSQL_CONFIG["MYSQL_USER_NAME"],
                user_pass=WFQ_SOURCE_MYSQL_CONFIG["MYSQL_USER_PASS"],
            )
        return self._to_db

    @property
    def redis_db(self):
        """Redis connection (created on first access).

        SECURITY NOTE(review): the password is hard-coded in the URL; it
        should be moved into configuration like the MySQL settings above.
        """
        if self._redis_db is None:
            self._redis_db = RedisDB(
                url="redis://:e63ceeb55115dbf72f4dcfdf44352604@server-forward.local.wfq2020.com:56383"
            )
        return self._redis_db

    def sync_datas(self, last_crawler_time):
        """Copy rows crawled after *last_crawler_time* into the task table.

        Rows are upserted in batches of 1000; on conflict every column except
        the key ``mall_id`` is updated.

        NOTE(review): *last_crawler_time* is interpolated into the SQL string,
        so it must come from trusted code (``MysqlDB.find`` is not called with
        a parameterized query here).
        """
        sql = f"""
        select  mall_id,
       msn,
       _sop_rcto,
       mall_goods_num,
       mall_sales,
       shop_name,
       mall_star,
       service_score,
       mall_details_label_list,
       shop_url,
       shop_status,
       is_all,
       flag,
       device_id,
       last_crawler_time,
       open_time_label,
       mall_desc,
       mall_logo,
       mall_label_volist,
       mall_rating_text_list,
       achievements_rank,
       review_num,
       now_crawler_page_num,
       out_state,
       opera_time,
       owner,
       out_create_time,
       channel,
       create_time,       update_time
        from net_pdd_shop_info
        where last_crawler_time > '{last_crawler_time}'
        """
        print(sql)
        datas = self.to_db.find(sql, to_json=True)
        batch_size = 1000
        length = len(datas)
        for start in range(0, length, batch_size):
            batch_datas = datas[start:start + batch_size]
            # Upsert every column except the primary key.
            update_columns = [col for col in batch_datas[0].keys() if col != "mall_id"]
            self.to_db.add_batch_smart("net_pdd_shop_info_task", batch_datas, update_columns=update_columns)
            logger.info(f"{start + batch_size}/{length} 条数据同步完成")

    def enqueue_last_month_today(self, today=None):
        """Enqueue shops whose last crawl fell on the same day one month ago.

        Each matching ``mall_id`` is added to the Redis sorted set
        ``net_pdd_shop_info_task`` scored by its last-crawl UNIX timestamp
        (0 when the timestamp is NULL).
        """
        if today is None:
            today = datetime.date.today()
        # One-day window exactly one month back: [today-1m, today-1m+1d).
        start_time = today - relativedelta(months=1)
        end_time = start_time + relativedelta(days=1)
        # COALESCE replaces the original CASE expression, which had no ELSE
        # branch and therefore produced a NULL score (breaking zadd) whenever
        # UNIX_TIMESTAMP(last_crawler_time) evaluated to 0.
        sql = (
            f"select mall_id, COALESCE(UNIX_TIMESTAMP(last_crawler_time), 0) as score "
            f"from net_pdd_shop_info_task where last_crawler_time > '{start_time}' and last_crawler_time < '{end_time}' and channel='' and out_state !=2"
        )
        print(sql)
        datas = self.to_db.find(sql, to_json=True)
        logger.info(f"上次采集时间在 {start_time} 至 {end_time} | {len(datas)} 条数据 且未完成 入单完成")
        if datas:
            # Guard: redis-py raises DataError on an empty zadd mapping.
            # NOTE(review): this reaches into the private ``_redis`` client;
            # check whether RedisDB exposes a public zadd wrapper instead.
            self.redis_db._redis.zadd("net_pdd_shop_info_task", {row["mall_id"]: row["score"] for row in datas})

    def throw_out_5_days_ago_datas(self):
        """Drop un-claimed Redis queue entries that are too old.

        Scores are last-crawl timestamps (set by ``enqueue_last_month_today``,
        which enqueues ~1 month after the crawl), so removing scores older
        than one month + 5 days keeps entries pending for at most ~5 days.
        """
        today = datetime.date.today()
        cutoff_date = today - relativedelta(months=1) - relativedelta(days=5)
        # Midnight (local time) of the cutoff date as a UNIX timestamp.
        cutoff_ts = int(datetime.datetime.strptime(str(cutoff_date), "%Y-%m-%d").timestamp())
        # Remove everything scored in [0, cutoff_ts] — the stale pending work.
        res = self.redis_db.zremrangebyscore("net_pdd_shop_info_task", 0, cutoff_ts)
        logger.info(f"{cutoff_date} 至 {today} | {res} 条待认领数据丢弃完成")

    def throw_out_5_days_ago_datas_by_mysql(self):
        """Release claimed-but-unfinished rows older than 5 days back to the
        un-claimed state (out_state=0, owner/opera_time cleared).

        NOTE(review): the final UPDATE is intentionally left disabled
        (dry-run); only the candidate rows are selected and logged.
        """
        today = datetime.date.today()
        last_5_days_ago_sql_time = (today - relativedelta(days=5)).strftime("%Y-%m-%d")
        sql = f"select mall_id, opera_time, out_create_time from net_pdd_shop_info_task where opera_time <= '{last_5_days_ago_sql_time}' and out_state=1 and channel=''"
        datas = self.to_db.find(sql, to_json=True)
        logger.info(f"领取时间在 历史 至 {last_5_days_ago_sql_time} | {len(datas)} 条数据已认领,但未完成 丢弃掉")
        if not datas:
            # Guard: an empty "mall_id in ()" clause would be invalid SQL.
            return
        mall_ids_str = ",".join(f"'{row['mall_id']}'" for row in datas)
        sql = f"""update net_pdd_shop_info_task set out_state  = 0, opera_time = null, owner = null where mall_id in ({mall_ids_str}) and channel=''"""
        # self.to_db.execute(sql)

    def every_day_run(self):
        """Daily maintenance entry point: enqueue this month's re-checks,
        then expire stale pending entries (MySQL reclaim stays disabled)."""
        self.enqueue_last_month_today()
        self.throw_out_5_days_ago_datas()
        # self.throw_out_5_days_ago_datas_by_mysql()


def clean():
    """Run one PDD clean pass over rows crawled in the last 10 minutes."""
    window_start = datetime.datetime.now() - datetime.timedelta(minutes=10)
    time_range = window_start.strftime("%Y-%m-%d %H:%M:%S")
    print(f"I'm running on thread {threading.current_thread()} {time_range}")
    PDDCleanRun().run_by_redis(time_range=time_range)


def run_threaded(job_func):
    """Fire-and-forget: run *job_func* on a freshly started background thread."""
    threading.Thread(target=job_func).start()


if __name__ == "__main__":
    import time

    import schedule

    task = SLWDTask()
    # Manual one-off runs (uncomment as needed):
    # task.sync_datas("2025-01-01")
    # task.every_day_run()
    # task.enqueue_last_month_today()
    # task.throw_out_5_days_ago_datas()

    # Kick off one clean pass immediately, looking back 40 minutes to catch
    # anything missed while the process was down.
    time_range = (datetime.datetime.now() - datetime.timedelta(minutes=40)).strftime("%Y-%m-%d %H:%M:%S")
    print(f"I'm running on thread {threading.current_thread()} {time_range}")
    PDDCleanRun().run_by_redis(time_range=time_range)

    # Recurring work: clean every 3 minutes on a worker thread, and run the
    # daily queue maintenance shortly after midnight.
    schedule.every(3).minutes.do(run_threaded, clean)
    schedule.every().day.at("00:10").do(task.every_day_run)

    while True:
        schedule.run_pending()
        time.sleep(1)