#!/usr/bin/env python
# -*- coding:utf-8 _*-
""" 
@author:robot
@file: ct_operate_node.py 
@version:
@time: 2022/05/25 
"""
import time

from TapClientCore.T06_Tapdata_feature.v2.TPF08_custom_node.tpf_custom_node import TPFCustomNode
from TapClientCore.T07_CaseTemplate.v2.CT03_task.ct_task import CTTask, allure, logger


# Custom-node task test template
class CTTaskNode10(CTTask, TPFCustomNode):
    def tap_check_custom(self, source_info, source_table, target_info, sink_table, source_func):
        """Full-load data check: transformed source rows must equal sink rows.

        Queries both endpoints and asserts ``source_func(source rows) ==
        sink rows``.  The normalization applied before comparing depends on
        the database-type pairing, because mongodb/kafka connectors return
        documents while relational connectors return tuples.

        :param source_info: connection info for the source database
        :param source_table: source table / collection / topic name
        :param target_info: connection info for the sink database
        :param sink_table: sink table / collection / topic name
        :param source_func: transform applied to source rows (mirrors the
            custom node logic), e.g. :meth:`source_transform`
        """
        # Give the task a moment to flush the last batch into the sink.
        time.sleep(10)
        # Import once here instead of inside every loop iteration as before.
        from bson import Decimal128, ObjectId

        def _plain_doc(doc):
            """Copy *doc* converting Decimal128 -> int and ObjectId -> str."""
            plain = {}
            for key in doc:
                value = doc[key]
                if isinstance(value, Decimal128):
                    # FIX: this used to be two independent `if`s, so the
                    # trailing `else` overwrote the Decimal128 conversion
                    # with the raw value; `elif` makes the conversion stick.
                    plain[key] = int(value.to_decimal())
                elif isinstance(value, ObjectId):
                    plain[key] = str(value)
                else:
                    plain[key] = value
            return plain

        def _doc_to_tuple(doc, table_description, convert_bson):
            """Project *doc* onto *table_description* column order as a tuple."""
            values = []
            for column in table_description:
                for key in doc:
                    if column[0] == key:
                        value = doc[key]
                        if convert_bson and isinstance(value, Decimal128):
                            value = int(value.to_decimal())
                        if convert_bson and isinstance(value, ObjectId):
                            value = str(value)
                        values.append(value)
            return tuple(values)

        def _compare(source_rows, sink_rows):
            """Log both sides, apply the custom transform, assert equality."""
            logger.info(source_rows)
            logger.info('source_transform')
            transformed = source_func(source_rows)
            logger.info(transformed)
            logger.info(sink_rows)
            assert transformed == sink_rows

        with allure.step('数据校验'):
            source_connector = self.init_connector(source_info)
            sink_connector = self.init_connector(target_info)
            source_type = source_connector.database_type
            sink_type = sink_connector.database_type
            if source_type == sink_type:
                # Homogeneous pair: rows are directly comparable.
                source_result = self.tap_query(source_connector, table=source_table)
                sink_result = self.tap_query(sink_connector, table=sink_table)
                _compare(source_result, sink_result)
            elif source_type in ['kafka']:
                source_result = self.tap_query(source_connector, table=source_table)
                sink_result = self.tap_query(sink_connector, table=sink_table)
                if sink_type in ['mongodb']:
                    # Strip BSON wrapper types from the mongo documents first.
                    _compare(source_result, [_plain_doc(doc) for doc in sink_result])
                elif sink_type in ['es']:
                    _compare(source_result, sink_result)
                else:
                    # Relational sink: project kafka messages onto the sink's
                    # column order so both sides are tuples.
                    description = sink_connector.tapclient_connector.TABLE_DESCRIPTION
                    projected = [
                        _doc_to_tuple(doc, description, convert_bson=False)
                        for doc in source_result
                    ]
                    _compare(projected, sink_result)
            elif source_type in ['mongodb']:
                sink_result = self.tap_query(sink_connector, table=sink_table)
                if sink_type in ['kafka']:
                    source_result = self.tap_query(source_connector, table=source_table, column=None)
                    _compare([_plain_doc(doc) for doc in source_result], sink_result)
                elif sink_type in ['elasticsearch']:
                    source_result = self.tap_query(source_connector, table=source_table)
                    _compare([_plain_doc(doc) for doc in source_result], sink_result)
                else:
                    # Relational sink: tuple projection plus BSON conversion.
                    source_result = self.tap_query(source_connector, table=source_table, column=None)
                    description = sink_connector.tapclient_connector.TABLE_DESCRIPTION
                    projected = [
                        _doc_to_tuple(doc, description, convert_bson=True)
                        for doc in source_result
                    ]
                    _compare(projected, sink_result)
            elif sink_type in ['mongodb', 'elasticsearch', 'kafka']:
                source_result = self.tap_query(source_connector, table=source_table)
                sink_result = self.tap_query(sink_connector, table=sink_table)
                # NOTE(review): the original also built a tuple projection of
                # the sink rows here and then never used it; that dead
                # computation has been dropped.  Confirm the comparison below
                # is really meant to use the raw sink rows.
                _compare(source_result, sink_result)
            else:
                source_result = self.tap_query(source_connector, table=source_table)
                sink_result = self.tap_query(sink_connector, table=sink_table)
                _compare(source_result, sink_result)

    def tap_cdc_check_custom(self, source_info, source_table, source_where, target_info, sink_table, source_func):
        """CDC data check: transformed (filtered) source rows must equal sink rows.

        Same comparison as :meth:`tap_check_custom`, but the source query is
        restricted by ``source_where`` so only the rows touched by the CDC
        phase are compared.

        :param source_info: connection info for the source database
        :param source_table: source table / collection / topic name
        :param source_where: where-clause spec limiting the source query
        :param target_info: connection info for the sink database
        :param sink_table: sink table / collection / topic name
        :param source_func: transform applied to source rows (mirrors the
            custom node logic), e.g. :meth:`source_transform`
        """
        # Give the task a moment to flush the last batch into the sink.
        time.sleep(10)
        # Import once here instead of inside every loop iteration as before.
        from bson import Decimal128, ObjectId

        def _plain_doc(doc):
            """Copy *doc* converting Decimal128 -> int and ObjectId -> str."""
            plain = {}
            for key in doc:
                value = doc[key]
                if isinstance(value, Decimal128):
                    # FIX: this used to be two independent `if`s, so the
                    # trailing `else` overwrote the Decimal128 conversion
                    # with the raw value; `elif` makes the conversion stick.
                    plain[key] = int(value.to_decimal())
                elif isinstance(value, ObjectId):
                    plain[key] = str(value)
                else:
                    plain[key] = value
            return plain

        def _doc_to_tuple(doc, table_description, convert_bson):
            """Project *doc* onto *table_description* column order as a tuple."""
            values = []
            for column in table_description:
                for key in doc:
                    if column[0] == key:
                        value = doc[key]
                        if convert_bson and isinstance(value, Decimal128):
                            value = int(value.to_decimal())
                        if convert_bson and isinstance(value, ObjectId):
                            value = str(value)
                        values.append(value)
            return tuple(values)

        def _compare(source_rows, sink_rows):
            """Log both sides, apply the custom transform, assert equality."""
            logger.info(source_rows)
            logger.info('source_transform')
            transformed = source_func(source_rows)
            logger.info(transformed)
            logger.info(sink_rows)
            assert transformed == sink_rows

        with allure.step('数据校验'):
            source_connector = self.init_connector(source_info)
            sink_connector = self.init_connector(target_info)
            source_type = source_connector.database_type
            sink_type = sink_connector.database_type
            if source_type == sink_type:
                # Homogeneous pair: rows are directly comparable.
                source_result = self.tap_query(source_connector, table=source_table, where=source_where)
                sink_result = self.tap_query(sink_connector, table=sink_table)
                _compare(source_result, sink_result)
            elif source_type in ['kafka']:
                source_result = self.tap_query(source_connector, table=source_table, where=source_where)
                sink_result = self.tap_query(sink_connector, table=sink_table)
                if sink_type in ['mongodb']:
                    # Strip BSON wrapper types from the mongo documents first.
                    _compare(source_result, [_plain_doc(doc) for doc in sink_result])
                elif sink_type in ['es']:
                    _compare(source_result, sink_result)
                else:
                    # Relational sink: project kafka messages onto the sink's
                    # column order so both sides are tuples.
                    description = sink_connector.tapclient_connector.TABLE_DESCRIPTION
                    projected = [
                        _doc_to_tuple(doc, description, convert_bson=False)
                        for doc in source_result
                    ]
                    _compare(projected, sink_result)
            elif source_type in ['mongodb']:
                sink_result = self.tap_query(sink_connector, table=sink_table)
                if sink_type in ['kafka']:
                    source_result = self.tap_query(source_connector, table=source_table, column=None,
                                                   where=source_where)
                    _compare([_plain_doc(doc) for doc in source_result], sink_result)
                elif sink_type in ['elasticsearch']:
                    source_result = self.tap_query(source_connector, table=source_table, where=source_where)
                    _compare([_plain_doc(doc) for doc in source_result], sink_result)
                else:
                    # Relational sink: tuple projection plus BSON conversion.
                    source_result = self.tap_query(source_connector, table=source_table, where=source_where,
                                                   column=None)
                    description = sink_connector.tapclient_connector.TABLE_DESCRIPTION
                    projected = [
                        _doc_to_tuple(doc, description, convert_bson=True)
                        for doc in source_result
                    ]
                    _compare(projected, sink_result)
            elif sink_type in ['mongodb', 'elasticsearch', 'kafka']:
                source_result = self.tap_query(source_connector, table=source_table, where=source_where)
                sink_result = self.tap_query(sink_connector, table=sink_table)
                # NOTE(review): the original also built a tuple projection of
                # the sink rows here and then never used it; that dead
                # computation has been dropped.  Confirm the comparison below
                # is really meant to use the raw sink rows.
                _compare(source_result, sink_result)
            else:
                source_result = self.tap_query(source_connector, table=source_table, where=source_where)
                sink_result = self.tap_query(sink_connector, table=sink_table)
                _compare(source_result, sink_result)

    def CUSTOM_NODE_CREATE(self, task_name, data):
        """Provision the custom node for *task_name* and wire it into *data*.

        Deletes leftovers from earlier runs, creates the node, renames it to
        a "_patch" name and back (exercising the update API), then stores the
        resulting node settings under ``data['task_config']['node']``.

        :param task_name: task name the node names are derived from
        :param data: task template dict; its ``node_config`` entry is popped
        :return: the mutated *data* dict
        """
        with allure.step('删除之前创建的任务和自定义节点'):
            custom_setting1 = {'name': task_name + '_customNode'}
            custom_setting2 = {'name': task_name + '_customNode_patch'}
            custom_name1 = custom_setting1.get('name')
            custom_name2 = custom_setting2.get('name')
            self.delete_data_copy_development(task_name)
            self.delete_custom_node_by_name(custom_name1)
            # NOTE(review): a leftover node named `custom_name2` from an
            # aborted earlier run is not cleaned up here and could make the
            # rename below fail -- confirm whether it should be deleted too.
        node_config = data.pop('node_config')
        with allure.step('创建自定义节点'):
            self.create_custom_node_and_get_info(custom_setting1)
        # Rename to the patch name and back; only the final info (after the
        # round-trip) is needed.  The pre-rename `customId` lookup in the
        # original was dead code (immediately overwritten) and was removed.
        with allure.step('修改自定义接地name'):
            self.update_custom_node_and_get_info(custom_name1, custom_setting2)
            custom_node_info = self.update_custom_node_and_get_info(custom_name2, custom_setting1)

        custom_id = custom_node_info.get('customId')

        with allure.step('自定义节点信息配置'):
            custom_node_setting = {
                **node_config,
                'custom_id': custom_id,
                'custom_name': custom_name1
            }
            logger.info(custom_node_setting)
            # The task template consumes the node settings from here.
            data['task_config']['node'] = custom_node_setting
        return data

    @staticmethod
    def source_transform(data):
        tmp = []
        for record in data:
            if type(record) is tuple:
                tmp2 = []
                for column in record:
                    if type(column) is str:
                        tmp2.append(column.replace('1234', '****'))
                    else:
                        tmp2.append(column)
                tmp3 = tuple(tmp2)
                tmp.append(tmp3)
            elif type(record) is dict:
                tmp2 = {}
                for column, value in record.items():
                    if type(value) is str:
                        tmp2.update({column: value.replace('1234', '****')})
                    else:
                        tmp2.update({column: value})
                tmp.append(tmp2)
        return tmp

    def TASK_CUSTOM_NODE_INITIAL(self, parallel_data):
        """Run a full-load (initial sync) task through a custom node and verify it.

        No CDC phase: the task is expected to complete, the insert counter is
        checked, and the sink data is compared against the transformed source.

        :param parallel_data: pool descriptor used to lock an available table
        """
        data = None
        record_id = None
        task_name = None
        _source = None
        _target = None
        # Stagger parallel workers so they do not hit the API at once.
        self.random_wait()
        try:
            with allure.step('获取可用表'):
                data = self.lock_and_get_table(parallel_data)
            record_id = data.get('id')

            # Endpoint descriptions for this case.
            operate = data.pop('operate')
            operate_source = operate.get('source')
            source_info = operate_source.get('source_info')
            source_table = operate_source.get('table')
            operate_target = operate.get('target')
            target_info = operate_target.get('target_info')
            target_table = operate_target.get('table')

            # Make the task name unique per locked table.
            data['task_config']['config']['name'] += source_table

            with allure.step('任务名获取'):
                task_name = data.get('task_config').get('config').get('name')

            # Provision the custom node and wire it into the task template.
            data = self.CUSTOM_NODE_CREATE(task_name, data)

            with allure.step('源数据库初始化'):
                _source = self.init_connector(source_info)
            with allure.step('目标数据库初始化'):
                _target = self.init_connector(target_info)
            _source.tapclient_connector.CURRENT_TABLE = source_table
            _target.tapclient_connector.CURRENT_TABLE = target_table

            source_where_column = operate_source.get('where')
            source_delete = _source.tapclient_connector.generate_delete(
                source_where_column
            )
            with allure.step('执行前删除要插入的数据'):
                _source.tapclient_connector.exec(source_delete)

            # Expected row counter for the initial (full-load) phase.
            _stats = operate.get('stats_check').get('initial')
            _stats_initial = _stats.get('insert')
            self.drop_target_table(_target)
            with allure.step('创建任务'):
                self.create_data_copy_development(data)
            with allure.step('任务状态等待'):
                self.scheduled_task_wait_it_complete(task_name)
            self.func_wait(
                self.get_task_stats_insert,
                self.get_task_status,
                task_name,
                _stats_initial
            )

            # Data-level comparison: transformed source vs sink.
            self.tap_check_custom(
                source_info,
                source_table,
                target_info,
                target_table,
                self.source_transform
            )
        finally:
            if record_id is not None:
                self.release_table(record_id)
            # Guard against early failure before `data` was set: the original
            # called data.get() unconditionally, raising AttributeError here
            # and masking the real exception.
            if data is not None:
                self.generate_task_url(task_name, data.get('task_type'))
                logger.info(self.task_url)
            for _connector in (_source, _target):
                if _connector is None:
                    continue
                try:
                    _connector.tapclient_connector.close()
                except Exception:
                    # Best-effort cleanup; the connection may never have opened.
                    pass

    def TASK_CUSTOM_NODE_CDC(self, parallel_data):
        """Run a CDC-only task through a custom node; verify insert/update/delete.

        The task is started first, then DML is executed on the source and each
        phase's stats counter and data contents are verified.

        :param parallel_data: pool descriptor used to lock an available table
        """
        data = None
        record_id = None
        source_delete = None
        task_name = None
        _source = None
        _target = None
        # Stagger parallel workers.
        self.random_wait()
        try:
            with allure.step('获取可用表'):
                data = self.lock_and_get_table(parallel_data)
            record_id = data.get('id')

            # Endpoint descriptions for this case.
            operate = data.pop('operate')
            operate_source = operate.get('source')
            operate_target = operate.get('target')
            source_info = operate_source.get('source_info')
            target_info = operate_target.get('target_info')

            # Column specs for the generated DML statements.
            source_table = operate_source.get('table')
            source_insert_column = operate_source.get('insert')
            source_update_column = operate_source.get('update')
            source_where_column = operate_source.get('where')
            target_table = operate_target.get('table')

            # Make the task name unique per locked table.
            data['task_config']['config']['name'] += source_table
            with allure.step('任务名获取'):
                task_name = data.get('task_config').get('config').get('name')

            # Provision the custom node and wire it into the task template.
            data = self.CUSTOM_NODE_CREATE(task_name, data)

            with allure.step('源数据库初始化'):
                _source = self.init_connector(source_info)
            with allure.step('目标数据库初始化'):
                _target = self.init_connector(target_info)
            _source.tapclient_connector.CURRENT_TABLE = source_table
            _target.tapclient_connector.CURRENT_TABLE = target_table
            source_insert = _source.tapclient_connector.generate_insert(
                source_insert_column
            )
            source_update = _source.tapclient_connector.generate_update(
                source_update_column,
                source_where_column
            )
            source_delete = _source.tapclient_connector.generate_delete(
                source_where_column
            )

            # Expected row counters per CDC phase.  (The unused `initial`
            # counter lookup from the original was removed.)
            _stats = operate.get('stats_check').get('cdc')
            _stats_insert = _stats.get('insert')
            _stats_update = _stats.get('update')
            _stats_delete = _stats.get('delete')
            self.drop_target_table(_target)
            with allure.step('执行前删除要插入的数据'):
                _source.tapclient_connector.exec(source_delete)
            with allure.step('创建任务'):
                self.create_data_copy_development(data)
            with allure.step('等待任务运行'):
                self.scheduled_task_wait_it_running(task_name)
            # Give the CDC pipeline time to reach steady state.
            logger.info('等待一段时间')
            time.sleep(180)

            # insert + check
            logger.info('cdc insert and check')
            _source.tapclient_connector.exec(source_insert)
            self.func_wait(
                self.get_task_stats_insert,
                self.get_task_status,
                task_name,
                _stats_insert
            )
            self.tap_cdc_check_custom(
                source_info,
                source_table,
                source_where_column,
                target_info,
                target_table,
                self.source_transform
            )
            # update + check
            _source.tapclient_connector.exec(source_update)
            self.func_wait(
                self.get_task_stats_update,
                self.get_task_status,
                task_name,
                _stats_update
            )
            self.tap_cdc_check_custom(
                source_info,
                source_table,
                source_where_column,
                target_info,
                target_table,
                self.source_transform
            )
            # delete + check
            _source.tapclient_connector.exec(source_delete)
            self.func_wait(
                self.get_task_stats_delete,
                self.get_task_status,
                task_name,
                _stats_delete
            )
            self.tap_cdc_check_custom(
                source_info,
                source_table,
                source_where_column,
                target_info,
                target_table,
                self.source_transform
            )
        except Exception as e:
            logger.info(e)
            # Rollback rows this case inserted.  `_source` may be unset when
            # the failure happened before connector init (the original
            # dereferenced it unconditionally, masking the real failure).
            if _source is not None and source_delete is not None:
                _source.tapclient_connector.exec(source_delete)
            # Re-raise instead of `assert False` so the real traceback
            # survives (and the failure still fires under `python -O`).
            # The duplicate release_table() here was removed: `finally`
            # already releases the lock exactly once.
            raise
        finally:
            if record_id is not None:
                self.release_table(record_id)
            # Guard against early failure before `task_name`/`data` were set.
            if task_name is not None and self.task_is_running(task_name):
                self.stop_task_wait_it_paused(task_name)
            if data is not None:
                self.generate_task_url(task_name, data.get('task_type'))
            for _connector in (_source, _target):
                if _connector is None:
                    continue
                try:
                    _connector.tapclient_connector.close()
                except Exception:
                    # Best-effort cleanup; the connection may never have opened.
                    pass

    def TASK_CUSTOM_NODE_INITIAL_CDC(self, parallel_data):
        """Run a full-load + CDC task through a custom node and verify each phase.

        First the initial (full-load) counters and data are verified, then
        insert/update/delete are executed on the source and each CDC phase is
        verified in turn.

        :param parallel_data: pool descriptor used to lock an available table
        """
        data = None
        record_id = None
        source_delete = None
        task_name = None
        _source = None
        _target = None
        try:
            # Stagger parallel workers.
            self.random_wait()
            with allure.step('获取可用表'):
                data = self.lock_and_get_table(parallel_data)
            record_id = data.get('id')

            # Endpoint descriptions for this case.
            operate = data.pop('operate')
            operate_source = operate.get('source')
            operate_target = operate.get('target')
            source_info = operate_source.get('source_info')
            target_info = operate_target.get('target_info')
            # Column specs for the generated DML statements.
            source_table = operate_source.get('table')
            source_insert_column = operate_source.get('insert')
            source_update_column = operate_source.get('update')
            source_where_column = operate_source.get('where')
            target_table = operate_target.get('table')

            # Make the task name unique per locked table.
            data['task_config']['config']['name'] += source_table
            with allure.step('任务名获取'):
                task_name = data.get('task_config').get('config').get('name')

            # Provision the custom node and wire it into the task template.
            data = self.CUSTOM_NODE_CREATE(task_name, data)

            with allure.step('源数据库初始化'):
                _source = self.init_connector(source_info)
            with allure.step('目标数据库初始化'):
                _target = self.init_connector(target_info)

            _source.tapclient_connector.CURRENT_TABLE = source_table
            _target.tapclient_connector.CURRENT_TABLE = target_table
            source_insert = _source.tapclient_connector.generate_insert(
                source_insert_column
            )
            source_update = _source.tapclient_connector.generate_update(
                source_update_column,
                source_where_column
            )
            source_delete = _source.tapclient_connector.generate_delete(
                source_where_column
            )

            # Expected row counters per phase.
            _stats = operate.get('stats_check').get('initial+cdc')
            _stats_initial = _stats.get('initial')
            _stats_insert = _stats.get('insert')
            _stats_update = _stats.get('update')
            _stats_delete = _stats.get('delete')
            self.drop_target_table(_target)
            with allure.step('执行前删除要插入的数据'):
                _source.tapclient_connector.exec(source_delete)
            with allure.step('创建任务'):
                self.create_data_copy_development(data)
            with allure.step('等待任务运行'):
                self.scheduled_task_wait_it_running(task_name)

            # Full-load phase verification.
            logger.info('initial check')
            self.func_wait(
                self.get_task_stats_insert,
                self.get_task_status,
                task_name,
                _stats_initial
            )
            self.tap_check_custom(
                source_info,
                source_table,
                target_info,
                target_table,
                self.source_transform
            )

            # CDC phase: insert + check.
            logger.info('cdc insert and check')
            _source.tapclient_connector.exec(source_insert)
            self.func_wait(
                self.get_task_stats_insert,
                self.get_task_status,
                task_name,
                _stats_insert
            )
            self.tap_check_custom(
                source_info,
                source_table,
                target_info,
                target_table,
                self.source_transform
            )
            # CDC phase: update + check.
            _source.tapclient_connector.exec(source_update)
            self.func_wait(
                self.get_task_stats_update,
                self.get_task_status,
                task_name,
                _stats_update
            )
            self.tap_check_custom(
                source_info,
                source_table,
                target_info,
                target_table,
                self.source_transform
            )
            # CDC phase: delete + check.
            _source.tapclient_connector.exec(source_delete)
            self.func_wait(
                self.get_task_stats_delete,
                self.get_task_status,
                task_name,
                _stats_delete
            )
            self.tap_check_custom(
                source_info,
                source_table,
                target_info,
                target_table,
                self.source_transform
            )
        except Exception as e:
            logger.info(e)
            # Rollback rows this case inserted.  `_source` may still be None
            # when the failure happened before connector init (the original
            # dereferenced it unconditionally, masking the real failure).
            if _source is not None and source_delete is not None:
                _source.tapclient_connector.exec(source_delete)
            # Re-raise instead of `assert False` so the real traceback
            # survives (and the failure still fires under `python -O`).
            # The duplicate release_table() here was removed: `finally`
            # already releases the lock exactly once.
            raise
        finally:
            if record_id is not None:
                self.release_table(record_id)
            # Guard against early failure before `task_name`/`data` were set.
            if task_name is not None and self.task_is_running(task_name):
                self.stop_task_wait_it_paused(task_name)
            if data is not None:
                self.generate_task_url(task_name, data.get('task_type'))
            for _connector in (_source, _target):
                if _connector is None:
                    continue
                try:
                    _connector.tapclient_connector.close()
                except Exception:
                    # Best-effort cleanup; the connection may never have opened.
                    pass
