"""
@Author: CheyenneLam
@Date: 2020-08-01 09:00:00
@LastEditTime: 2020-08-01 09:00:00
@LastEditors: CheyenneLam
@Description: 从loghub获取分发日志，分发binlog记录到各个数据表的binlog表中
@FilePath: \distribute_log.py
"""


from odps.df import DataFrame
from odps import ODPS
import random
from multiprocessing import Process
from odps import ODPS, options


def transferContent(content):
    if content is None:
        return None
    else:
        string = ""
        for c in content:
            if c == '"':
                string += '\\\"'
            elif c == "'":
                string += "\\\'"
            elif c == "\\":
                string += "\\\\"
            else:
                string += c
        return string


def run(tableNameLise, args, incTableName, system_table_info_dict):
    """Distribute binlog records from the shared incremental (loghub)
    table into each target table's own ``<table>_binlog`` partition.

    For every assigned table name an ``INSERT INTO ... SELECT`` statement
    is generated from the target binlog table's schema: each column is
    extracted from the JSON payload with GET_JSON_OBJECT and cast to the
    matching MaxCompute type.  Per-table failures are collected and, at
    the end of the run, written to 'system_error_table'.

    Args:
        tableNameLise: table names assigned to this worker process
            (parameter name kept as-is for caller compatibility).
        args: CArgs instance carrying the partition date and job metadata.
        incTableName: name of the shared incremental source table.
        system_table_info_dict: source table name -> metadata record
            providing dw_full_table_name, data_base and column_info.

    NOTE(review): relies on a module-level ODPS entry object ``o``
    injected by the execution environment — confirm before reuse.
    """
    error_table = o.get_table('system_error_table')
    error_list = []
    # DataFrame type name -> MaxCompute SQL type
    typeDict = {
        'INT64': 'BIGINT',
        'int64': 'BIGINT',
        'float64': 'DOUBLE',
        'list<int64>': 'ARRAY<BIGINT>',
        'list<float64>': 'ARRAY<DOUBLE>',
        'list<string>': 'ARRAY<string>'
    }
    start_time = args.getStartTime()
    year = args.getYear()
    month = args.getMonth()
    day = args.getDay()
    for tableName in tableNameLise:
        # Fix: pre-seed fullTableName so the except/else handlers below
        # cannot raise NameError when a failure happens before
        # dw_full_table_name has been read.
        fullTableName = tableName
        try:
            if not (tableName in system_table_info_dict):
                continue
            table_info = system_table_info_dict[tableName]
            fullTableName = table_info.dw_full_table_name.strip()
            df = o.get_table(f'''{fullTableName}_binlog''').to_df()
            # Is this table sourced from MongoDB?  JSON payload formats
            # (arrays, datetimes) differ and need different extraction SQL.
            mongo_flag = table_info.data_base.strip() == 'mongo'
            print("dealing ", fullTableName)
            # map and array columns cannot go through DISTINCT
            insertSql = ''
            hasComplicatedType = False
            columns = df.schema.columns
            for idx, x in enumerate(columns):
                if x.name == 'before_image' or x.name == 'after_image' or x.name == 'operation_type':
                    insertSql += f'''{x.name}'''
                elif x.name == 'sequence_id':
                    insertSql += f'''CAST({x.name} as BIGINT)'''
                elif x.name == 'mdf_year':
                    insertSql += f''''{year}' as mdf_year'''
                elif x.name == 'mdf_month':
                    insertSql += f''''{month}' as mdf_month'''
                elif x.name == 'mdf_day':
                    insertSql += f''''{day}' as mdf_day'''
                else:
                    if x.name not in table_info.column_info:
                        raise Exception(fullTableName, "在column_info找不到", x.name)
                    column_type = table_info.column_info[x.name]
                    if column_type.strip().lower() == 'bytea':
                        insertSql += f'''cast(hex_decode(GET_JSON_OBJECT(inc.schema, '$.{x.name}')) as {typeDict.get(x.type.name.lower(), x.type.name).replace('list', 'ARRAY').replace('int64', 'BIGINT')} )  as {x.name}'''
                    elif x.type.name.find('list') != -1:
                        hasComplicatedType = True
                        if mongo_flag:
                            insertSql += '''FROM_JSON(GET_JSON_OBJECT(inc.schema, '$.{0}'), "{1}") as {2}'''.format(
                                x.name, typeDict.get(x.type.name.lower(), x.type.name), x.name)
                        else:
                            # Postgres-style array literal '{a,b,c}': grab the
                            # braces' contents, split on commas, cast to ARRAY.
                            insertSql += '''CAST(SPLIT(REGEXP_EXTRACT(GET_JSON_OBJECT(inc.schema, '$.{0}'), "{1}"), ",") AS {2}) as {3}'''.format(
                                x.name, "\\{([^}]*)\\}", typeDict.get(x.type.name.lower(), x.type.name), x.name)

                    elif x.type.name == 'boolean':
                        insertSql += f'''DECODE(GET_JSON_OBJECT(inc.schema, '$.{x.name}'),
                            'TRUE', TRUE ,
                            'true', TRUE ,
                            'FALSE', FALSE ,
                            'false', FALSE ,
                            NULL
                        ) as {x.name}'''
                    elif x.type.name == 'datetime':
                        if mongo_flag:
                            insertSql += f'''cast(FROM_UTC_TIMESTAMP(REPLACE(REPLACE(GET_JSON_OBJECT(inc.schema, '$.{x.name}'), 'T', " "),'Z',''), 'Asia/Shanghai') as  {x.type.name.replace('INT64', 'BIGINT').replace('int64', 'BIGINT')} )  as  {x.name}'''
                        else:
                            insertSql += f'''cast(FROM_UTC_TIMESTAMP(REPLACE(GET_JSON_OBJECT(inc.schema, '$.{x.name}'),'+00', ''), 'Asia/Shanghai') as  {x.type.name.replace('INT64', 'BIGINT').replace('int64', 'BIGINT')} )  as  {x.name}'''
                    elif x.type.name == 'timestamp' and mongo_flag:
                        insertSql += f'''cast(FROM_UTC_TIMESTAMP(REPLACE(REPLACE(GET_JSON_OBJECT(inc.schema, '$.{x.name}'), 'T', " "),'Z',''), 'Asia/Shanghai') as  {x.type.name.replace('INT64', 'BIGINT').replace('int64', 'BIGINT')} )  as  {x.name}'''
                    else:
                        insertSql += f'''cast(GET_JSON_OBJECT(inc.schema, '$.{x.name}') as {typeDict.get(x.type.name.lower(), x.type.name).replace('list', 'ARRAY').replace('int64', 'BIGINT')} )  as {x.name}'''
                # Fix: separate columns by position rather than comparing
                # names against the last column (which misplaces separators
                # when a column name repeats).
                if idx != len(columns) - 1:
                    insertSql += '\n,'
            content = transferContent(tableName)
            sql = f'''insert into {fullTableName}_binlog PARTITION (mdf_year, mdf_month, mdf_day) SELECT  {"" if hasComplicatedType else "DISTINCT"} {insertSql} \n from {incTableName} inc where dt = {start_time} and table_name="{content}";'''
            print(f"""
            .
            .
            .
            """)
            # print(sql)  # uncomment to inspect the generated statement
            i = o.execute_sql(sql)
            print(fullTableName, " log: ", i.get_logview_address())
        except Exception as e:
            print(f"dealing {fullTableName} error: {e} \n")
            err = error_table.new_record()
            err['node_name'] = "分发log"
            err['message'] = f'''dealing {fullTableName}, err message: {str(e)}'''
            err['job_id'] = args.getJobId()
            err['node_id'] = args.getNodeId()
            err['task_id'] = args.getTaskId()
            err['ctime'] = args.getCtime()
            error_list.append(err)
        else:
            print(f"success dealing {fullTableName}! \n")
    if len(error_list) > 0:
        with error_table.open_writer(partition=f'dt={args.getCtime()}', create_partition=True) as writer:
            writer.write(error_list)


def groupingByTableName(tableName):
    """Return a stable bucket index in [0, 2] for *tableName*, used to
    spread tables across the three worker processes; None maps to None.

    Fix: the original hashed the module-level ``record`` variable instead
    of the parameter — it only worked by accident because the sole call
    site passes ``record.table_name``.
    """
    if tableName is None:
        return None
    return abs(hash(tableName)) % 3


class CArgs:
    """Read-only container for the job's date-partition values and
    scheduling metadata, exposed through simple getter methods."""

    def __init__(self, year, month, day, start_time, node_id, task_id, ctime, job_id):
        # All values live in one private mapping instead of individual
        # name-mangled attributes; the public getter API is unchanged.
        self._params = {
            'year': year,
            'month': month,
            'day': day,
            'start_time': start_time,
            'node_id': node_id,   # node ID
            'task_id': task_id,   # instance ID
            'ctime': ctime,       # scheduled run time
            'job_id': job_id,     # flow/job ID
        }

    def getYear(self):
        return self._params['year']

    def getMonth(self):
        return self._params['month']

    def getDay(self):
        return self._params['day']

    def getStartTime(self):
        return self._params['start_time']

    def getNodeId(self):
        return self._params['node_id']

    def getTaskId(self):
        return self._params['task_id']

    def getCtime(self):
        return self._params['ctime']

    def getJobId(self):
        return self._params['job_id']


if __name__ == '__main__':
    # Cap every generated SQL job at 3 stages.
    options.sql.settings = {'odps.stage.num': 3}
    # NOTE(review): ``args`` (scheduling parameters dict) and ``o`` (ODPS
    # entry object) are injected by the PyODPS/DataWorks runtime — confirm.
    year = args['year']
    month = args['month']
    day = args['day']
    start_time = args['startTime']

    node_id = args['node_id']  # node ID
    task_id = args['task_id']  # instance ID
    ctime = args['ctime']      # scheduled run time
    job_id = args['job_id']    # flow/job ID
    args = CArgs(year, month, day, start_time, node_id, task_id, ctime, job_id)

    incTableName = 'partition_loghub_binlog_change_log'
    system_table_info_dict = {}
    system_table_sql = "select * from system_table_info "
    with o.execute_sql(system_table_sql).open_reader(tunnel=True, limit=False) as reader:
        for record in reader:
            system_table_info_dict[record.data_source_table.strip()] = record

    # Collect the distinct table names present in this run's partition.
    sql = f"SELECT  DISTINCT table_name from {incTableName} where dt={start_time}"
    # One bucket of table names per worker process, indexed by hash value.
    buckets = [[], [], []]
    # limit=False lifts the default row cap for this query
    with o.execute_sql(sql).open_reader(tunnel=True, limit=False) as reader:
        for record in reader:
            hashValue = groupingByTableName(record.table_name)
            if hashValue is not None:
                buckets[hashValue].append(record.table_name)

    workers = [
        Process(target=run, args=(bucket, args, incTableName, system_table_info_dict))
        for bucket in buckets
    ]
    for p in workers:
        p.start()
    # Fix: wait for the workers explicitly instead of relying on the
    # interpreter's implicit join of non-daemon children at exit.
    for p in workers:
        p.join()