#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time   :2024/7/29 14:59
# @Author :王凯
# @File   :pdd_fav.py
# @Project:scrapy_spider

import concurrent.futures
import datetime
import json
from pathlib import Path

import pandas as pd
from loguru import logger
from retrying import retry
from tqdm import tqdm

from apps.pdd.pdd.clean.encrypt_utils import FileEncryptDecryptUtils
from components.config import WFQ_SOURCE_MYSQL_CONFIG
from utils.db.mysqldb import MysqlDB


class PddExport(FileEncryptDecryptUtils):
    """Export PDD mall/goods data from MySQL into per-batch CSV files and encrypt them.

    Workflow (``run_mul_by_ids``):
      1. fetch mall + goods rows for the given mall ids in parallel batches,
      2. de-duplicate (keep the "best" row per mall),
      3. write one mall CSV + goods CSV(s) per ``batch_size`` malls,
      4. encrypt every produced file via the inherited ``encrypt`` method.
    """

    # Excel's worksheet row limit is 1,048,576; keep a small safety margin so
    # every goods CSV stays openable in Excel.
    _MAX_CSV_ROWS = 1048570

    def __init__(self, version=None):
        # NOTE(review): base-class __init__ is not invoked here — confirm
        # FileEncryptDecryptUtils needs no initialisation of its own.
        if version is None:
            version = datetime.datetime.now().strftime("%Y%m%d")
        self.version = version
        self._to_db = None  # lazily created MysqlDB handle, see `to_db`
        # SQL templates; `{}` is filled with a quoted, comma-joined mall-id list.
        self.shop_info_sql = """select version, platform_id, platform_name, mall_id, mall_name, company_name_src, taxpayer_id, company_name, month_sales, month_sales_money, total_sales, total_sales_money, percent_month_sales, percent_month_sales_money, percent_total_sales, percent_total_sales_money, goods_num, self_support, mall_url, mall_open_time, mall_open_time_year, mall_status, mall_end_time, mall_platform_classification, mall_business_classification, mall_scope, company_province, company_city, company_county, company_town, mall_location_code, mall_send_province, mall_send_city, mall_send_county, mall_send_location_code, mall_desc, mall_logo, mall_star, mall_label, mall_rating_text, mall_achievements_rank, mall_service_score, mall_details_label_list
        from result_pdd_shop_info where mall_id in ({}) and mall_name != ''"""
        self.good_info_sql = """select  version, company_name, mall_name, mall_id, goods_id, goods_name, price, sale_num, level_1, level_2, level_3, goods_rank, hd_url, market_price, normal_price, tag_list
        from result_pdd_goods_detail where mall_id in ({}) and mall_name != ''"""

    @property
    def to_db(self):
        """Lazily create and cache the source-MySQL connection."""
        if self._to_db is None:
            self._to_db = MysqlDB(
                ip=WFQ_SOURCE_MYSQL_CONFIG["MYSQL_IP"],
                port=WFQ_SOURCE_MYSQL_CONFIG["MYSQL_PORT"],
                db=WFQ_SOURCE_MYSQL_CONFIG["MYSQL_DB"],
                user_name=WFQ_SOURCE_MYSQL_CONFIG["MYSQL_USER_NAME"],
                user_pass=WFQ_SOURCE_MYSQL_CONFIG["MYSQL_USER_PASS"],
            )
        return self._to_db

    # Bug fix: decorator order swapped. Originally ``@retry`` wrapped
    # ``@logger.catch``; since logger.catch swallows every exception (returning
    # None), retry never saw a failure and never retried. With retry innermost
    # it re-runs up to 5 times, and logger.catch only logs the final failure.
    @logger.catch
    @retry(stop_max_attempt_number=5)
    def get_datas_from_db(self, sql):
        """Execute *sql* and return the rows as a list of dicts."""
        return self.to_db.find(sql, to_json=True)

    def _fetch_batched(self, sql_template, mall_ids, batch_size):
        """Query *sql_template* in ``batch_size`` chunks of mall ids.

        Returns a DataFrame of all fetched rows (empty if nothing matched).
        """
        rows = []
        for start in range(0, len(mall_ids), batch_size):
            chunk = mall_ids[start:start + batch_size]
            in_clause = ",".join(f"'{mall_id}'" for mall_id in chunk)
            rows.extend(self.get_datas_from_db(sql_template.format(in_clause)))
        return pd.DataFrame(rows)

    def get_mall_info(self, mall_ids, batch_size=1000):
        """Fetch shop-info rows for *mall_ids* via batched IN-queries."""
        return self._fetch_batched(self.shop_info_sql, mall_ids, batch_size)

    def get_goods_info(self, mall_ids, batch_size=1000):
        """Fetch goods-detail rows for *mall_ids* via batched IN-queries."""
        return self._fetch_batched(self.good_info_sql, mall_ids, batch_size)

    @logger.catch
    def run_by_ids(self, mall_ids):
        """Return ``(mall_df, goods_df)`` for one batch of mall ids."""
        df_mall = self.get_mall_info(mall_ids)
        df_goods = self.get_goods_info(mall_ids)
        return df_mall, df_goods

    def run_mul_by_ids(self, all_mall_ids, max_workers=10, batch_size=10000):
        """Fetch all data in parallel, de-duplicate, write CSV batches, encrypt.

        :param all_mall_ids: full list of mall ids to export
        :param max_workers: thread count for the fetch phase
        :param batch_size: mall ids per fetch job AND malls per output file
        """
        # Collect per-batch frames and concat ONCE at the end; the original
        # `df = pd.concat([df, batch])` inside the loop was quadratic.
        mall_frames = []
        goods_frames = []
        with concurrent.futures.ThreadPoolExecutor(max_workers=max_workers) as executor:
            future_to_ids = {}
            for start in range(0, len(all_mall_ids), batch_size):
                chunk = all_mall_ids[start:start + batch_size]
                future_to_ids[executor.submit(self.run_by_ids, chunk)] = chunk

            for future in tqdm(
                concurrent.futures.as_completed(future_to_ids),
                total=len(future_to_ids),
            ):
                mall_ids = future_to_ids[future]
                try:
                    df_mall, df_good = future.result()
                    mall_frames.append(df_mall)
                    goods_frames.append(df_good)
                except Exception as e:
                    logger.error(f"{mall_ids} 错误: {e}")

        # Robustness: if every batch failed (or input was empty) there are no
        # columns to sort/dedupe on — bail out instead of raising KeyError.
        if not mall_frames:
            logger.warning("no mall data fetched, nothing to export")
            return
        # ignore_index replaces the original discarded reset_index(drop=True)
        # calls, which were no-ops (their return value was thrown away).
        df_malls = pd.concat(mall_frames, axis=0, ignore_index=True)
        df_goods = pd.concat(goods_frames, axis=0, ignore_index=True)

        # Keep the "best" row per mall: most goods, then newest open year.
        df_malls.sort_values(by=["goods_num", "mall_open_time_year"], ascending=False, inplace=True)
        df_malls.drop_duplicates(subset=["mall_id"], inplace=True, keep="first")
        if not df_goods.empty:
            df_goods.drop_duplicates(subset=["mall_id", "goods_id"], inplace=True)

        # One file set per `batch_size` malls, generated concurrently.
        with concurrent.futures.ThreadPoolExecutor(max_workers=5) as executor:
            future_to_idx = {}
            for idx, start in enumerate(range(0, len(df_malls), batch_size), start=1):
                batch_mall = df_malls[start:start + batch_size]
                # Bug fix: filter goods by THIS batch's malls. The original
                # tested membership against the full df_malls, so every goods
                # file contained the entire goods table.
                if df_goods.empty:
                    batch_good = df_goods
                else:
                    batch_good = df_goods[df_goods["mall_id"].isin(batch_mall["mall_id"])]
                logger.info(f"{idx} 开始")
                future_to_idx[executor.submit(self.gen_csv, batch_mall, batch_good, idx)] = idx

            for future in tqdm(
                concurrent.futures.as_completed(future_to_idx),
                total=len(future_to_idx),
            ):
                idx = future_to_idx[future]
                try:
                    future.result()
                except Exception as e:
                    logger.error(f"{idx} 错误: {e}")
                finally:
                    logger.info(f"{idx} 完成")

        self.encrypt_file()

    def gen_csv(self, df_malls, df_goods, idx):
        """Write one batch: a mall CSV plus goods CSV(s) under ./<YYYYMMDD>/.

        Goods frames larger than ``_MAX_CSV_ROWS`` are split across numbered
        files so each stays under Excel's row limit.
        """
        time_str = datetime.datetime.now().strftime("%Y%m%d")
        path = Path(__file__).parent / time_str
        path.mkdir(parents=True, exist_ok=True)
        (path / "encrypt").mkdir(parents=True, exist_ok=True)

        df_malls = df_malls.reset_index(drop=True)
        mall_file_name = f"repay_plan_mall_{time_str}_{str(idx).zfill(4)}0000.csv"
        df_malls.to_csv(path / mall_file_name, index=False)

        if df_goods.empty:
            return
        # Restrict goods to this batch's malls in BOTH branches; the original
        # over-limit branch wrote the goods frame unfiltered.
        this_goods_df = df_goods[df_goods["mall_id"].isin(df_malls["mall_id"])].reset_index(drop=True)

        if this_goods_df.shape[0] > self._MAX_CSV_ROWS:
            # Exceeds the per-file row cap — split into numbered chunks.
            for idx_j, j in enumerate(range(0, this_goods_df.shape[0], self._MAX_CSV_ROWS), start=1):
                goods_file_name = f"repay_plan_goods_{time_str}_{str(idx).zfill(4)+str(idx_j).zfill(4)}.csv"
                goods_res = this_goods_df[j:j + self._MAX_CSV_ROWS].reset_index(drop=True)
                if not goods_res.empty:
                    goods_res.to_csv(path / goods_file_name, index=False)
        else:
            goods_file_name = f"repay_plan_goods_{time_str}_{str(idx).zfill(4)}0000.csv"
            if not this_goods_df.empty:
                this_goods_df.to_csv(path / goods_file_name, index=False)

    def encrypt_file(self):
        """Encrypt every repay_plan CSV of today's run into <date>/encrypt/."""
        time_str = datetime.datetime.now().strftime("%Y%m%d")
        path = Path(__file__).parent / time_str
        path.mkdir(parents=True, exist_ok=True)
        encrypt_path = path / "encrypt"
        encrypt_path.mkdir(parents=True, exist_ok=True)

        print("开始加密")
        for p in path.iterdir():
            # Match on the file NAME; the original matched the full posix path,
            # which would match everything if any parent dir contained the tag.
            if "repay_plan" in p.name:
                self.encrypt(p, encrypt_path / p.name)
        print("加密完成")


def _append_exported_malls(df_old, directory):
    """Append mall rows from every repay_plan_mall CSV found in *directory*."""
    for p in Path(directory).iterdir():
        if "repay_plan_mall" in p.name:
            df_old = pd.concat([df_old, pd.read_csv(p)])
    return df_old


def get_now_ids():
    """Return mall ids present in the DB but missing from all past exports.

    Loads historical mall ids from ``20241223.json`` plus the mall CSVs of
    three previous export runs, pulls the current candidate set from MySQL,
    and returns only the ids not yet exported.
    """
    # read_text replaces the original read_bytes().decode("utf-8") round-trip.
    df_old = pd.DataFrame(json.loads(Path("20241223.json").read_text(encoding="utf-8")))
    df_old.columns = ["mall_id"]

    # Previous export runs (hard-coded local paths) — the same scan repeated
    # three times in the original is now one loop over a helper.
    for exported_dir in (
        r"C:\Users\wk\PycharmProjects\scrapy_spider\apps\pdd\pdd\clean\20241230\20241230",
        r"C:\Users\wk\PycharmProjects\scrapy_spider\apps\pdd\pdd\clean\20250106",
        r"C:\Users\wk\PycharmProjects\scrapy_spider\apps\pdd\pdd\clean\20250113",
    ):
        df_old = _append_exported_malls(df_old, exported_dir)

    sql = "select mall_id from result_pdd_shop_info where mall_name != '' and (goods_num > 0 or mall_status = '该店铺正在上传商品' or mall_status = '该店铺已失效')"
    db = MysqlDB(
        ip=WFQ_SOURCE_MYSQL_CONFIG["MYSQL_IP"],
        port=WFQ_SOURCE_MYSQL_CONFIG["MYSQL_PORT"],
        db=WFQ_SOURCE_MYSQL_CONFIG["MYSQL_DB"],
        user_name=WFQ_SOURCE_MYSQL_CONFIG["MYSQL_USER_NAME"],
        user_pass=WFQ_SOURCE_MYSQL_CONFIG["MYSQL_USER_PASS"],
    )
    datas = db.find(sql, to_json=True)

    df = pd.DataFrame(data=datas)
    print("历史数据：", df_old.shape)
    df.drop_duplicates(inplace=True, subset=["mall_id"], keep="last")
    print("全库数据：", df.shape)

    # Compare as strings so int-vs-str ids from the two sources line up.
    df["mall_id"] = df["mall_id"].astype("str")
    df_old["mall_id"] = df_old["mall_id"].astype("str")

    df = df[~df["mall_id"].isin(df_old["mall_id"])]
    print(df.shape)
    print(df.head())

    return df["mall_id"].tolist()


if __name__ == "__main__":
    # Export only the mall ids not covered by previous runs.
    pending_ids = get_now_ids()
    exporter = PddExport()
    exporter.run_mul_by_ids(pending_ids)
