#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time    : 2023/10/11 16:49
# @Author  : 王凯
# @File    : __init__.py.py
# @Project : spider-man
import ast
import functools
import threading
import time

import pandas as pd
import psycopg2
from dbutils.pooled_db import PooledDB

from components.config import NET_ROBOT_MYSQL_CONFIG
from utils.db.mysqldb import MysqlDB
from utils.db.redisdb import RedisDB
from utils.es_company_tools import EsCompanyTools
from utils.logs import log, logger


def log_function_time(func):
    """Decorator that logs *func*'s wall-clock run time at DEBUG level.

    The wrapped function's metadata (``__name__``, ``__doc__`` …) is
    preserved via :func:`functools.wraps`, and its return value is passed
    through unchanged.

    Note: the original version wrapped this body in a bare ``try/except``
    claiming to guard against "incompatible function arguments", but merely
    defining the wrapper can never raise — the guard was dead code and has
    been removed.
    """

    @functools.wraps(func)  # copy the original function's attributes onto the wrapper
    def calculate_time(*args, **kw):
        began_time = time.time()
        callfunc = func(*args, **kw)
        end_time = time.time()
        # Lazy %-style args: the message is only rendered if DEBUG is enabled.
        logger.debug("%s run time  = %s", func.__name__, end_time - began_time)
        return callfunc

    return calculate_time


class PgsqlDB(MysqlDB):
    """PostgreSQL variant of :class:`MysqlDB`.

    Only the connection-pool construction differs: it builds a psycopg2-backed
    ``PooledDB`` and stores it in ``self.connect_pool``, which the inherited
    query helpers (``find`` etc.) operate on.
    """

    def __init__(self, ip=None, port=None, db=None, user_name=None, user_pass=None, **kwargs):
        # Deliberately does NOT call super().__init__ — the parent would build
        # a MySQL pool; we only need the psycopg2 pool created below.
        # super(PgsqlDB).__init__(ip, port, db, user_name, user_pass, **kwargs)
        try:
            self.connect_pool = PooledDB(
                creator=psycopg2,
                mincached=1,
                maxcached=100,
                maxconnections=100,
                blocking=True,  # block when the pool is exhausted instead of raising
                ping=7,  # validate the connection on checkout / use
                host=ip,
                port=port,
                user=user_name,
                password=user_pass,
                database=db,
            )

        except Exception as e:
            # NOTE(security): the password is masked here — the original logged
            # it in plaintext at ERROR level.
            # NOTE(review): on failure self.connect_pool stays unset, so later
            # queries will raise AttributeError — confirm callers expect this
            # best-effort behaviour before changing it to a re-raise.
            log.error(
                """
            连接失败：
            ip: {}
            port: {}
            db: {}
            user_name: {}
            user_pass: {}
            exception: {}
            """.format(
                    ip, port, db, user_name, "******", e
                )
            )
        else:
            log.debug("连接到pgsql数据库 %s : %s" % (ip, db))


class CleanCreditGrade:
    """Clean credit-grade rows from ``net_credit_grade_a`` into ``clean_credit_grade_a``.

    Workflow:
      1. :meth:`put_task` enqueues all distinct company names into a Redis set,
         in batches of 1000 (each batch stored as the ``str()`` of a list of dicts).
      2. :meth:`task_run` starts worker threads; each worker (:meth:`run_task`)
         pops a batch, and :meth:`run` normalises year / taxpayer-id /
         company-name fields and upserts the result into the target table.
    """

    mysql_db_origin = MysqlDB(
        ip=NET_ROBOT_MYSQL_CONFIG["MYSQL_IP"],
        port=NET_ROBOT_MYSQL_CONFIG["MYSQL_PORT"],
        db=NET_ROBOT_MYSQL_CONFIG["MYSQL_DB"],
        user_name=NET_ROBOT_MYSQL_CONFIG["MYSQL_USER_NAME"],
        user_pass=NET_ROBOT_MYSQL_CONFIG["MYSQL_USER_PASS"],
    )
    # TODO(security): plaintext credentials hardcoded in source — move to
    # config / environment like NET_ROBOT_MYSQL_CONFIG above.
    TONGHUASHUN_CONFIG = {
        "ip": "server-forward.local.wfq2020.com",
        "db": "tonghuashun-datasync",
        "port": 55432,
        "user_name": "p.third-party-api.p.v1",
        "user_pass": "ba914871062365732c5eacf1c61daffd",
    }
    pgsql_db = PgsqlDB(
        ip=TONGHUASHUN_CONFIG["ip"],
        port=TONGHUASHUN_CONFIG["port"],
        db=TONGHUASHUN_CONFIG["db"],
        user_name=TONGHUASHUN_CONFIG["user_name"],
        user_pass=TONGHUASHUN_CONFIG["user_pass"],
    )
    origin_table = "net_credit_grade_a"  # raw crawled data (MySQL)
    target_table = "clean_credit_grade_a"  # cleaned output (MySQL)
    redis_key = f"clean_task:{origin_table}:to:{target_table}"  # Redis task set
    redis_db = RedisDB()

    def get_data(self, id_list: list) -> pd.DataFrame:
        """Fetch origin rows by primary key; an empty id list yields an empty frame.

        NOTE: double-quoted SQL string literals rely on MySQL's default
        (non-ANSI_QUOTES) mode.
        """
        if not id_list:
            return pd.DataFrame()  # avoid invalid SQL "in ()"
        id_list_str = ",".join([f'"{i}"' for i in id_list])
        sql = f"select company_name, taxpayer_id, year, province from {self.origin_table} where id in ({id_list_str})"
        return pd.DataFrame(self.mysql_db_origin.find(sql, to_json=True))

    def get_data_by_com(self, com_list):
        """Fetch origin rows by company name; an empty list yields an empty frame."""
        if not com_list:
            return pd.DataFrame()  # avoid invalid SQL "in ()"
        com_list_str = ",".join([f'"{i}"' for i in com_list])
        sql = (
            f"select company_name, taxpayer_id, year, province from {self.origin_table} where company_name in "
            f"({com_list_str})"
        )
        return pd.DataFrame(self.mysql_db_origin.find(sql, to_json=True))

    def get_com_tax_mapping(self, coms):
        """Map company name -> unified social credit code (taxpayer id).

        Tries Elasticsearch first; on any failure falls back to the pgsql
        mirror tables (enterprise + individual business info).
        """
        try:
            # `or {}` guards against the ES helper returning None for the key.
            com_mapping = EsCompanyTools().get_company_name_or_taxpayer_id(coms).get("taxpayer_id_mapping") or {}
        except Exception:
            com_mapping = {}
            coms_str = ",".join([f"'{i}'" for i in coms])
            sql = (
                "select corp_name as company_name, unified_social_credit_code as taxpayer_id from enterprise_basic_info"
                " where isvalid=1 and unified_social_credit_code is not null and corp_name in ({coms_str}) union all "
                "select corp_name as company_name, unified_social_credit_code as taxpayer_id from individual_business_info"
                " where isvalid=1 and unified_social_credit_code is not null and corp_name in ({coms_str})"
            )
            if coms_str:
                data = self.pgsql_db.find(sql.format(coms_str=coms_str), to_json=True)
                for row in data:
                    if row["taxpayer_id"]:  # skip null/empty codes from the mirror
                        com_mapping[row["company_name"]] = row["taxpayer_id"]
        return com_mapping

    @log_function_time
    def run(self, com_list):
        """Clean and upsert all origin rows for the given company names."""
        df = self.get_data_by_com(com_list)
        if df.empty:
            # An empty result has no columns — the .str accessors below would
            # raise KeyError, so bail out early.
            return
        df = df[(df["year"].str.len() == 4) & (df["year"].str.isnumeric())]  # drop non-standard years
        # Companies whose taxpayer id is malformed: wrong length, masked with
        # more than one '*', or all zeros.
        coms = (
            df[
                (df["taxpayer_id"].str.len() != 18)
                | (df["taxpayer_id"].str.count(r"\*") > 1)
                | (df["taxpayer_id"].str.count("0") == df["taxpayer_id"].str.len())
                ]["company_name"]
            .unique()
            .tolist()
        )
        # Repair malformed taxpayer ids via ES / pgsql lookup.
        com_taxpayer_id_mapping = self.get_com_tax_mapping(coms)
        df["taxpayer_id"] = df["company_name"].map(com_taxpayer_id_mapping).fillna(df["taxpayer_id"])
        # Canonicalise company names (best effort; ES failures are ignored).
        com_company_mapping = {}
        try:
            com_company_mapping = (
                EsCompanyTools().get_company_name_or_taxpayer_id(coms).get("company_name_mapping") or {}
            )
        except Exception:
            pass
        df['company_name'] = df["company_name"].map(com_company_mapping).fillna(df["company_name"])
        df = df.drop_duplicates(subset=["company_name", "taxpayer_id", "year"])
        df = df.drop(columns=["province"])
        save_data = df.to_dict(orient="records")
        # Upsert the cleaned rows.
        if save_data:
            self.mysql_db_origin.add_batch_smart(self.target_table, save_data, update_columns=["company_name"])

    def put_task(self):
        """Enqueue all distinct origin company names into Redis, 1000 per batch."""
        sql = f"select distinct company_name from {self.origin_table}"
        tasks = self.mysql_db_origin.find(sql=sql, to_json=True)
        batch_size = 1000
        for i in range(0, len(tasks), batch_size):
            sub_datas = tasks[i: i + batch_size]
            # Each batch is stored as the repr of a list of dicts; run_task
            # parses it back with ast.literal_eval.
            self.redis_db.sadd(self.redis_key, str(sub_datas))
            print(f"{i}/{len(tasks)}")

    def run_task(self):
        """Worker loop: pop batches from Redis and clean them until the set is empty."""
        while True:
            res = self.redis_db.sget(self.redis_key)
            logger.info(f"redis_key:{self.redis_key} 剩余数据量：{self.redis_db.sget_count(self.redis_key)}")
            print(f"redis_key:{self.redis_key} 剩余数据量：{self.redis_db.sget_count(self.redis_key)}")
            if res:
                # Safer than eval(): only parses Python literals, which is all
                # put_task ever stores (repr of a list of dicts).
                batch = ast.literal_eval(res[0])
                com_list = [i["company_name"] for i in batch]
                self.run(com_list)
            else:
                logger.info(f"redis_key:{self.redis_key} 数据已取完 任务完成")
                break

    def task_run(self, max_workers=10):
        """Start *max_workers* threads all consuming from the same Redis set."""
        for _ in range(max_workers):
            threading.Thread(target=self.run_task, args=()).start()


if __name__ == "__main__":
    # Manual entry point: enqueue every distinct company name as Redis tasks,
    # then start the worker threads that consume and clean them.
    cleaner = CleanCreditGrade()
    cleaner.put_task()
    cleaner.task_run()
