import json
from collections import defaultdict
from itertools import groupby
from operator import itemgetter

import aiomysql
from re_common.baselibrary.tools.all_requests.aiohttp_request import AioHttpRequest
from re_common.baselibrary.tools.all_requests.mrequest import MRequest
from re_common.baselibrary.utils.core.mdeprecated import retry_func_async, try_except2_async

from apps.crawler_platform.core_api.models import UpdateSqlModel, AddManyModel
from apps.crawler_platform.core_platform.core_g import CoreSqlValue, SQLTable, StateNode
from apps.crawler_platform.core_platform.g_model import InputPlatformModel
from settings import URLDISTRIBUTED

"""
Operations executed through the distributed API (plus a few direct-pool
helpers for low-volume reads and writes).
"""


class CoreSqlMixin(object):
    """SQL operations used by the crawler platform.

    Two access paths are used:

    * High-volume writes (:meth:`update_sql` / :meth:`insert_many_sql`) go
      through the distributed HTTP API so the write load can be spread
      across workers.
    * Low-volume reads/writes talk to MySQL directly through the shared
      ``aiomysql`` pool exposed as ``app.state.pool`` -- these are small
      enough not to need the distributed path and are more reliable direct.
    """

    # ------------------------------------------------------------------
    # shared low-level helpers
    # ------------------------------------------------------------------

    @staticmethod
    async def _post_platform_api(url, data):
        """POST the JSON payload *data* to the platform API at *url*.

        :param url: full endpoint URL of the distributed SQL API
        :param data: JSON string produced by an ``InputPlatformModel``
        :return: ``(bools, dicts)`` -- ``bools`` is ``False`` when the HTTP
            request failed or the API replied with ``status == 'FAILED'``;
            ``dicts`` is the decoded response body on success.
        """
        rrq = AioHttpRequest()
        rrq.set_url(url) \
            .set_timeout(30) \
            .set_data(data) \
            .set_middler_list(
            [rrq.is_null_html_middlerwares, rrq.status_code_middlerwares])
        bools, dicts = await rrq.run(MRequest.POST)
        if bools:
            dicts = json.loads(rrq.html)
            if dicts["status"] == 'FAILED':
                bools = False
        return bools, dicts

    @staticmethod
    async def _fetchone_dict(sql, params=None):
        """Execute *sql* with bound *params* and return the first row as a
        dict, or ``None`` when nothing matches."""
        # Imported late to avoid a circular import with the app module.
        from main import app
        pool = app.state.pool
        async with pool.acquire() as conn:
            async with conn.cursor(aiomysql.DictCursor) as cur:
                await cur.execute(sql, params)
                return await cur.fetchone()

    @staticmethod
    async def _execute_commit(sql, params=None):
        """Execute a write statement with bound *params* and commit."""
        # Imported late to avoid a circular import with the app module.
        from main import app
        pool = app.state.pool
        async with pool.acquire() as conn:
            async with conn.cursor() as cur:
                await cur.execute(sql, params)
                await conn.commit()

    # ------------------------------------------------------------------
    # operations routed through the distributed API
    # ------------------------------------------------------------------

    @staticmethod
    @retry_func_async(retry_times=3, sleep_time=2)
    @try_except2_async(is_print=True)
    async def update_sql(table, update_dict, update_no_placeholder, where_dict):
        """Run an UPDATE through the distributed SQL API (retried 3 times).

        :param table: target table name
        :param update_dict: column -> value pairs for the SET clause
        :param update_no_placeholder: forwarded to ``UpdateSqlModel``;
            presumably SET fragments used verbatim -- confirm against the API
        :param where_dict: column -> value pairs for the WHERE clause
        :return: ``(bools, dicts)`` as produced by :meth:`_post_platform_api`
        """
        url = await URLDISTRIBUTED.get_sql_update_url()
        data = InputPlatformModel[UpdateSqlModel](
            data=UpdateSqlModel(
                table=table,
                update=update_dict,
                update_no_placeholder=update_no_placeholder,
                where=where_dict
            )
        ).json()
        return await CoreSqlMixin._post_platform_api(url, data)

    @staticmethod
    @retry_func_async(retry_times=3, sleep_time=2)
    @try_except2_async(is_print=True)
    async def insert_many_sql(table, feild_list, value_list, insert=CoreSqlValue.insert_ig_it):
        """Run a batched INSERT through the distributed SQL API (3 retries).

        :param table: target table name
        :param feild_list: column names (the ``feild`` spelling is kept --
            it is part of the ``AddManyModel`` contract)
        :param value_list: rows to insert
        :param insert: insert mode, defaults to ``CoreSqlValue.insert_ig_it``
        :return: ``(bools, dicts)`` as produced by :meth:`_post_platform_api`
        """
        url = await URLDISTRIBUTED.get_sql_add_many_url()
        data = InputPlatformModel[AddManyModel](
            data=AddManyModel(
                table=table,
                feild_list=feild_list,
                value_list=value_list,
                insert=insert,
            )
        ).json()
        return await CoreSqlMixin._post_platform_api(url, data)

    # ------------------------------------------------------------------
    # direct-pool operations: low volume, no need for the distributed path.
    # All values are bound as %s parameters (never f-string interpolated)
    # to prevent SQL injection; only SQLTable constants are formatted in.
    # ------------------------------------------------------------------

    @staticmethod
    async def get_homelist(task_name, task_tag, home_rawid, page_index):
        """Fetch one journal_home row for a task/tag/home page, or ``None``."""
        sql = (f"select * from {SQLTable.journal_home} "
               "where task_name=%s and task_tag=%s and home_rawid=%s "
               "and page_index=%s")
        return await CoreSqlMixin._fetchone_dict(
            sql, (task_name, task_tag, home_rawid, page_index))

    @staticmethod
    async def get_journallist(task_name, task_tag, journal_rawid):
        """Fetch one journal_list row for a journal rawid, or ``None``."""
        sql = (f"select * from {SQLTable.journal_list} "
               "where task_name=%s and task_tag=%s and journal_rawid=%s")
        return await CoreSqlMixin._fetchone_dict(
            sql, (task_name, task_tag, journal_rawid))

    @staticmethod
    async def get_issuelist(task_name, task_tag, journal_rawid, pub_year, num):
        """Fetch one journal_issue row for a journal/year/issue, or ``None``."""
        sql = (f"select * from {SQLTable.journal_issue} "
               "where task_name=%s and task_tag=%s and journal_rawid=%s "
               "and pub_year=%s and num=%s")
        return await CoreSqlMixin._fetchone_dict(
            sql, (task_name, task_tag, journal_rawid, pub_year, num))

    @staticmethod
    async def get_article(task_name, task_tag, rawid):
        """Fetch one journal_article row by rawid, or ``None``."""
        sql = (f"select * from {SQLTable.journal_article} "
               "where task_name=%s and task_tag=%s and rawid=%s")
        return await CoreSqlMixin._fetchone_dict(
            sql, (task_name, task_tag, rawid))

    @staticmethod
    async def get_etl_article():
        """Fetch one journal_article row flagged ``is_true=1``, or ``None``."""
        sql = f"select * from {SQLTable.journal_article} where is_true=1"
        return await CoreSqlMixin._fetchone_dict(sql)

    @staticmethod
    async def save_taskinfo(taskinfo):
        """REPLACE one row of the taskinfo table.

        :param taskinfo: dict that must hold every column listed below
        """
        sql = f"replace into {SQLTable.taskinfo}(task_name,task_tag,task_tag_next,order_num,url,req_type,`groups`,`funcs`," \
              "resp_middler,headers,params,marks,parse_rules," \
              "validate_rules,timeout,turn_page,post_data,state_key,post_data_is_json)values(%s,%s,%s,%s,%s," \
              "%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"
        # Same order as the column list in the statement above.
        fields = ("task_name", "task_tag", "task_tag_next", "order_num",
                  "url", "req_type", "groups", "funcs", "resp_middler",
                  "headers", "params", "marks", "parse_rules",
                  "validate_rules", "timeout", "turn_page", "post_data",
                  "state_key", "post_data_is_json")
        await CoreSqlMixin._execute_commit(
            sql, tuple(taskinfo[f] for f in fields))

    @staticmethod
    async def update_taskinfo(sql):
        """Execute a caller-built statement against taskinfo and commit.

        NOTE(review): *sql* is executed verbatim -- callers must never build
        it from untrusted input.
        """
        await CoreSqlMixin._execute_commit(sql)

    @staticmethod
    async def save_taskinfo_set(taskset):
        """REPLACE one row of the taskinfo save-settings table.

        :param taskset: dict that must hold every column listed below
            (``fianl_stat`` spelling kept -- it is the actual column name)
        """
        sql = f"replace into {SQLTable.taskinfo_save_setting}(task_name,task_tag,id_list,nosql_table,history_cnt, task_state,tag_state,table_name,fianl_stat, proxy_ctrl)values(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"
        fields = ("task_name", "task_tag", "id_list", "nosql_table",
                  "history_cnt", "task_state", "tag_state", "table_name",
                  "fianl_stat", "proxy_ctrl")
        await CoreSqlMixin._execute_commit(
            sql, tuple(taskset[f] for f in fields))

    @staticmethod
    async def insert_errinfo(task_name, task_tag, rawid, batch, err_msg, tag):
        """Record a server-side failure in the error table.

        ``err_type`` is ``task_name_task_tag_rawid``; ``state`` starts at 0
        and ``insert ignore`` skips rows that violate a unique key.
        """
        sql = f"insert ignore into {SQLTable.err_except_data}(err_type, task_name, task_tag, batch, err_str, type_name, state) values(%s, %s, %s, %s, %s, %s, 0)"
        err_type = "_".join([task_name, task_tag, rawid])
        await CoreSqlMixin._execute_commit(
            sql, (err_type, task_name, task_tag, batch, err_msg,
                  f"server_{tag}_failed"))

    # #################### above: basic operations ####################

    @staticmethod
    def set_state(pm):
        """Build a multi-way tree of state nodes to solve result tagging.

        Every parse config of the same task_name/task_tag is grouped first
        by its final state key (``last_key``) and then by its single-step
        key (``s_key``); the groups are hung under a synthetic top node and
        ``StateNode.get_rsult`` walks the tree.
        """
        if not hasattr(pm, "redis_model"):
            raise Exception("pm 不存在redis_model")
        lists = []
        # Collect every parse config for this task.
        for keys in pm.redis_model.parse_dict.keys():
            one_para = pm.redis_model.parse_dict[keys]
            state_key = one_para.task_info.state_key
            # SECURITY: state_key comes from the taskinfo table and is
            # eval()'d -- anyone able to write that column can execute
            # arbitrary code here.  ast.literal_eval would be safer if the
            # stored values are plain literals (TODO confirm).
            state_dicts = eval(state_key)
            lists.append({
                # The state key is a column name of the data table; inside
                # state_dicts the key marks success, the value failure.
                "g_key": state_dicts["last_key"],
                "state_dicts": state_dicts,
                # keys is built from the taskinfo `groups` and order.
                "keys": keys
            })
        # groupby only groups adjacent items, so sort on the same key first.
        lists.sort(key=itemgetter('g_key'))
        lstg = groupby(lists, itemgetter('g_key'))
        # Synthetic root; carries no real meaning.
        top_node = StateNode(node_type="top")
        for g_key, group in lstg:
            # First level below the root: final-state nodes.
            top_node.add_child(StateNode("is_true", g_key, list(group)))

        # Second level: regroup each final-state node's entries by s_key.
        for is_true_node in top_node.get_child():
            items = []
            for item in is_true_node.get_l_d_info():
                item["g_key"] = item["state_dicts"]["s_key"]
                items.append(item)
            items.sort(key=itemgetter('g_key'))
            for g_key, group in groupby(items, itemgetter('g_key')):
                is_true_node.add_child(StateNode("state", g_key, list(group)))

        top_node.get_rsult(pm)

    # ######################## below: paper ########################

    @staticmethod
    async def get_paper_start(task_name, task_tag, year, months):
        """Fetch one paper_days row for a year/month (any day), or ``None``."""
        sql = (f"select * from {SQLTable.paper_days} "
               "where task_name=%s and task_tag=%s and year=%s and months=%s")
        return await CoreSqlMixin._fetchone_dict(
            sql, (task_name, task_tag, year, months))

    @staticmethod
    async def get_paper_days(task_name, task_tag, year, months, days):
        """Fetch one paper_days row for an exact year/month/day, or ``None``."""
        sql = (f"select * from {SQLTable.paper_days} "
               "where task_name=%s and task_tag=%s and year=%s "
               "and months=%s and days=%s")
        return await CoreSqlMixin._fetchone_dict(
            sql, (task_name, task_tag, year, months, days))

    @staticmethod
    async def get_paper_page(task_name, task_tag, rawid_page):
        """Fetch one paper_page row by rawid_page, or ``None``."""
        sql = (f"select * from {SQLTable.paper_page} "
               "where task_name=%s and task_tag=%s and rawid_page=%s")
        return await CoreSqlMixin._fetchone_dict(
            sql, (task_name, task_tag, rawid_page))

    @staticmethod
    async def get_paper_article(task_name, task_tag, rawid):
        """Fetch one paper_article row by rawid, or ``None``."""
        sql = (f"select * from {SQLTable.paper_article} "
               "where task_name=%s and task_tag=%s and rawid=%s")
        return await CoreSqlMixin._fetchone_dict(
            sql, (task_name, task_tag, rawid))