#!/usr/bin/env python
# -*- coding:utf-8 _*-
""" 
@author:robot
@file: ct_task_base.py 
@version:
@time: 2022/07/26 
"""
from TapClientCore.T07_CaseTemplate.v2.CT03_task.ct_task import *


class CTTaskParallel(CTTask):
    """Parallel-execution task templates: initial sync, CDC, and initial+CDC.

    Each template locks a table record from a shared pool, configures and
    runs a data-copy task, verifies task stats and row contents, and always
    releases the table lock in ``finally``.
    """

    def TASK_TEMPLATE_PARALLEL_INITIAL(self, parallel_data):
        """
        Run an initial (full) sync task in parallel, without field validation.

        :param parallel_data: descriptor used to lock an available table
            record (schema assumed from usage — confirm against
            ``lock_and_get_table``)
        :return: None
        """
        data = None
        record_id = None
        task_name = None
        source_info = None
        source_table = None
        target_info = None
        target_table = None
        task_record = None
        # Pre-bind connectors so the cleanup code never hits a NameError
        # when initialization fails early.
        _source = None
        _target = None
        # Random wait to de-synchronize parallel workers.
        self.random_wait()
        try:
            # Parallel-case initialization: lock one available table.
            with allure.step('获取可用表'):
                data = self.lock_and_get_table(parallel_data)
            record_id = data.get('id')

            operate = data.pop('operate')
            operate_source = operate.get('source')
            source_info = operate_source.get('source_info')
            source_table = operate_source.get('table')
            operate_target = operate.get('target')
            target_info = operate_target.get('target_info')
            target_table = operate_target.get('table')

            # Make the task name unique per locked source table.
            data['task_config']['config']['name'] += source_table
            # Update the redis cache key prefix to match the table.
            if data.get('task_config').get('config').get('redis'):
                data['task_config']['config']['redis']['nodeConfig']['cachePrefix'] = source_table
            with allure.step('任务名获取'):
                task_name = data.get('task_config').get('config').get('name')

            # Connector setup.
            with allure.step('源数据库初始化'):
                _source = self.init_connector(source_info)
            with allure.step('目标数据库初始化'):
                _target = self.init_connector(target_info)
            _source.tapclient_connector.CURRENT_TABLE = source_table
            _target.tapclient_connector.CURRENT_TABLE = target_table

            source_where_column = operate_source.get('where')
            source_delete = _source.tapclient_connector.generate_delete(
                source_where_column
            )
            with allure.step('执行前删除要插入的数据'):
                _source.tapclient_connector.exec(source_delete)
            # Expected stats for the initial phase.
            _stats = operate.get('stats_check').get('initial')
            _stats_initial = _stats.get('insert')
            self.drop_target_table(_target)
            with allure.step('创建任务'):
                task_record = self.create_data_copy_development(data)
            # Wait for completion, then verify the initial insert stats.
            with allure.step('任务状态等待'):
                self.scheduled_task_wait_it_complete(task_name)
            self.func_wait(
                self.get_task_stats_insert,
                self.get_task_status,
                task_name,
                _stats_initial
            )
            # Row-content comparison between source and target.
            self.tap_check(
                source_info,
                source_table,
                target_info,
                target_table
            )
        except TaskError as e:
            # Task-level failure: attach diagnostics, then still compare data.
            self.exception_attach(task_name, e)
            self.tap_check(
                source_info,
                source_table,
                target_info,
                target_table
            )
        except Exception as e:
            self.exception_attach(task_name, e)
        finally:
            self.release_table(record_id)
            # data is None when table locking itself failed; calling
            # data.get() then would mask the original error with an
            # AttributeError raised from finally.
            if data is not None:
                self.generate_task_url(task_name, data.get('task_type'))
                logger.info(self.task_url)
            try:
                _source.tapclient_connector.close()
                _target.tapclient_connector.close()
                self.task_log_all(task_record.get('id'))
            except Exception:
                # Best-effort cleanup; failures here are non-fatal.
                pass

    def TASK_TEMPLATE_PARALLEL_CDC(self, parallel_data):
        """
        Run a CDC (incremental) task in parallel and verify the
        insert/update/delete phases via stats and row comparison.

        :param parallel_data: descriptor used to lock an available table
        :return: None
        """
        data = None
        # Random wait to de-synchronize parallel workers.
        self.random_wait()
        record_id = None
        source_delete = None
        task_name = None
        source_info = None
        source_table = None
        source_where_column = None
        target_info = None
        target_table = None
        task_record = None
        # Pre-bind connectors so the exception handlers and cleanup never
        # hit a NameError when initialization fails early.
        _source = None
        _target = None
        try:
            # Parallel-case initialization: lock one available table.
            with allure.step('获取可用表'):
                data = self.lock_and_get_table(parallel_data)
            record_id = data.get('id')

            # Operate info.
            operate = data.pop('operate')
            operate_source = operate.get('source')
            operate_target = operate.get('target')
            source_info = operate_source.get('source_info')
            target_info = operate_target.get('target_info')

            # DDL and DML column mappings.
            source_table = operate_source.get('table')
            source_insert_column = operate_source.get('insert')
            source_update_column = operate_source.get('update')
            source_where_column = operate_source.get('where')
            target_table = operate_target.get('table')

            # Make the task name unique per locked source table.
            data['task_config']['config']['name'] += source_table
            # Update the redis cache key prefix to match the table.
            if data.get('task_config').get('config').get('redis'):
                data['task_config']['config']['redis']['nodeConfig']['cachePrefix'] = source_table
            with allure.step('任务名获取'):
                task_name = data.get('task_config').get('config').get('name')
            # Connector setup.
            with allure.step('源数据库初始化'):
                _source = self.init_connector(source_info)
            with allure.step('目标数据库初始化'):
                _target = self.init_connector(target_info)
            _source.tapclient_connector.CURRENT_TABLE = source_table
            _target.tapclient_connector.CURRENT_TABLE = target_table
            source_insert = _source.tapclient_connector.generate_insert(
                source_insert_column
            )
            source_update = _source.tapclient_connector.generate_update(
                source_update_column,
                source_where_column
            )
            source_delete = _source.tapclient_connector.generate_delete(
                source_where_column
            )

            # Expected stats for each CDC phase.
            _stats = operate.get('stats_check').get('cdc')
            _stats_insert = _stats.get('insert')
            _stats_update = _stats.get('update')
            _stats_delete = _stats.get('delete')
            # Reset the target table before the run.
            self.drop_target_table(_target)
            with allure.step('执行前删除要插入的数据'):
                _source.tapclient_connector.exec(source_delete)
            with allure.step('创建任务'):
                task_record = self.create_data_copy_development(data)
            with allure.step('等待任务运行'):
                self.scheduled_task_wait_it_running(task_name)
            # Give the incremental task time to settle before writing.
            logger.info('等待一段时间')
            time.sleep(cdc_wait)
            # CDC phase: insert.
            logger.info('cdc insert and check')
            _source.tapclient_connector.exec(source_insert)
            self.func_wait(
                self.get_task_stats_insert,
                self.get_task_status,
                task_name,
                _stats_insert
            )
            self.tap_cdc_check(
                source_info,
                source_table,
                source_where_column,
                target_info,
                target_table
            )
            # CDC phase: update.
            _source.tapclient_connector.exec(source_update)
            self.func_wait(
                self.get_task_stats_update,
                self.get_task_status,
                task_name,
                _stats_update
            )
            self.tap_cdc_check(
                source_info,
                source_table,
                source_where_column,
                target_info,
                target_table
            )
            # CDC phase: delete.
            _source.tapclient_connector.exec(source_delete)
            self.func_wait(
                self.get_task_stats_delete,
                self.get_task_status,
                task_name,
                _stats_delete
            )
            self.tap_cdc_check(
                source_info,
                source_table,
                source_where_column,
                target_info,
                target_table
            )
        except TaskError as e:
            # Task-level failure: attach diagnostics, then still compare data.
            self.exception_attach(task_name, e)
            self.tap_check(
                source_info,
                source_table,
                target_info,
                target_table
            )
        except Exception as e:
            logger.info(e)
            # Best-effort source cleanup; only possible once the connector
            # and the delete statement exist. (The table lock is released
            # in finally — releasing it here too would double-release.)
            if _source is not None and source_delete is not None:
                try:
                    _source.tapclient_connector.exec(source_delete)
                except Exception:
                    pass
            self.exception_attach(task_name, e)
        finally:
            self.release_table(record_id)
            if self.task_is_running(task_name):
                self.stop_task_wait_it_paused(task_name)
            # Guard against data being None (lock failure) so finally does
            # not raise and mask the original error.
            if data is not None:
                self.generate_task_url(task_name, data.get('task_type'))
            try:
                _source.tapclient_connector.close()
                _target.tapclient_connector.close()
                self.task_log_all(task_record.get('id'))
            except Exception:
                # Best-effort cleanup; failures here are non-fatal.
                pass

    def TASK_TEMPLATE_PARALLEL_INITIAL_CDC(self, parallel_data):
        """
        Run an initial+CDC task in parallel: verify the full-sync phase,
        then the incremental insert/update/delete phases.

        :param parallel_data: descriptor used to lock an available table
        :return: None
        """
        data = None
        source_delete = None
        record_id = None
        _source = None
        _target = None
        task_name = None
        source_info = None
        source_table = None
        target_info = None
        target_table = None
        task_record = None
        try:
            # Random wait to de-synchronize parallel workers.
            self.random_wait()
            # Parallel-case initialization: lock one available table.
            with allure.step('获取可用表'):
                data = self.lock_and_get_table(parallel_data)
            record_id = data.get('id')

            # Operate info.
            operate = data.pop('operate')
            operate_source = operate.get('source')
            operate_target = operate.get('target')
            source_info = operate_source.get('source_info')
            target_info = operate_target.get('target_info')
            # Connector setup.
            _source = self.init_connector(source_info)
            _target = self.init_connector(target_info)
            # DDL and DML column mappings.
            source_table = operate_source.get('table')
            source_insert_column = operate_source.get('insert')
            source_update_column = operate_source.get('update')
            source_where_column = operate_source.get('where')
            target_table = operate_target.get('table')

            # Make the task name unique per locked source table.
            data['task_config']['config']['name'] += source_table
            # Update the redis cache key prefix to match the table.
            if data.get('task_config').get('config').get('redis'):
                data['task_config']['config']['redis']['nodeConfig']['cachePrefix'] = source_table
            task_name = data.get('task_config').get('config').get('name')

            _source.tapclient_connector.CURRENT_TABLE = source_table
            _target.tapclient_connector.CURRENT_TABLE = target_table
            source_insert = _source.tapclient_connector.generate_insert(
                source_insert_column
            )
            source_update = _source.tapclient_connector.generate_update(
                source_update_column,
                source_where_column
            )
            source_delete = _source.tapclient_connector.generate_delete(
                source_where_column
            )

            # Expected stats for each phase.
            _stats = operate.get('stats_check').get('initial+cdc')
            _stats_initial = _stats.get('initial')
            _stats_insert = _stats.get('insert')
            _stats_update = _stats.get('update')
            _stats_delete = _stats.get('delete')
            # Reset the target table before the run.
            self.drop_target_table(_target)
            with allure.step('执行前删除要插入的数据'):
                _source.tapclient_connector.exec(source_delete)
            with allure.step('创建任务'):
                task_record = self.create_data_copy_development(data)
            with allure.step('等待任务运行'):
                self.scheduled_task_wait_it_running(task_name)
            # Initial (full-sync) phase check.
            logger.info('initial check')
            self.func_wait(
                self.get_task_stats_insert,
                self.get_task_status,
                task_name,
                _stats_initial
            )
            self.tap_check(
                source_info,
                source_table,
                target_info,
                target_table
            )
            # CDC phase: insert.
            logger.info('cdc insert and check')
            _source.tapclient_connector.exec(source_insert)
            self.func_wait(
                self.get_task_stats_insert,
                self.get_task_status,
                task_name,
                _stats_insert
            )
            self.tap_check(
                source_info,
                source_table,
                target_info,
                target_table
            )
            # CDC phase: update.
            _source.tapclient_connector.exec(source_update)
            self.func_wait(
                self.get_task_stats_update,
                self.get_task_status,
                task_name,
                _stats_update
            )
            self.tap_check(
                source_info,
                source_table,
                target_info,
                target_table
            )
            # CDC phase: delete.
            _source.tapclient_connector.exec(source_delete)
            self.func_wait(
                self.get_task_stats_delete,
                self.get_task_status,
                task_name,
                _stats_delete
            )
            self.tap_check(
                source_info,
                source_table,
                target_info,
                target_table
            )
        except TaskError as e:
            # Task-level failure: attach diagnostics, then still compare data.
            self.exception_attach(task_name, e)
            self.tap_check(
                source_info,
                source_table,
                target_info,
                target_table
            )
        except Exception as e:
            logger.info(e)
            # Best-effort source cleanup; _source may still be None if
            # initialization failed, which previously raised an
            # AttributeError here and masked the original error. (The table
            # lock is released in finally — releasing it here too would
            # double-release.)
            if _source is not None and source_delete is not None:
                try:
                    _source.tapclient_connector.exec(source_delete)
                except Exception:
                    pass
            self.exception_attach(task_name, e)
        finally:
            self.release_table(record_id)
            if self.task_is_running(task_name):
                self.stop_task_wait_it_paused(task_name)
            # Guard against data being None (lock failure) so finally does
            # not raise and mask the original error.
            if data is not None:
                self.generate_task_url(task_name, data.get('task_type'))
            try:
                _source.tapclient_connector.close()
                _target.tapclient_connector.close()
                self.task_log_all(task_record.get('id'))
            except Exception:
                # Best-effort cleanup; failures here are non-fatal.
                pass


class CTTaskKafkaAndMQ(CTTask):
    """Task templates for Kafka/MQ sources: initial, CDC, and initial+CDC."""

    def TASK_TEMPLATE_INITIAL_Kafka_And_MQ(self, parallel_data, gen_data):
        """
        Run an initial (full) sync task from a Kafka/MQ source in parallel.

        :param parallel_data: descriptor used to lock an available table
        :param gen_data: column/value mapping used to generate the full-sync
            message written to the queue and to verify the target
        :return: None
        """
        data = None
        record_id = None
        task_name = None
        source_info = None
        source_table = None
        target_info = None
        target_table = None
        task_record = None
        # Pre-bind connectors so the cleanup code never hits a NameError
        # when initialization fails early.
        _source = None
        _target = None
        # Random wait to de-synchronize parallel workers.
        self.random_wait()
        try:
            # Parallel-case initialization: lock one available table.
            with allure.step('获取可用表'):
                data = self.lock_and_get_table(parallel_data)
            record_id = data.get('id')

            operate = data.pop('operate')
            operate_source = operate.get('source')
            source_info = operate_source.get('source_info')
            source_table = operate_source.get('table')
            operate_target = operate.get('target')
            target_info = operate_target.get('target_info')
            target_table = operate_target.get('table')

            # Make the task name unique per locked source table.
            data['task_config']['config']['name'] += source_table
            # Update the redis cache key prefix to match the table.
            if data.get('task_config').get('config').get('redis'):
                data['task_config']['config']['redis']['nodeConfig']['cachePrefix'] = source_table
            with allure.step('任务名获取'):
                task_name = data.get('task_config').get('config').get('name')

            # Connector setup.
            with allure.step('源数据库初始化'):
                _source = self.init_connector(source_info)
            with allure.step('目标数据库初始化'):
                _target = self.init_connector(target_info)
            _source.tapclient_connector.CURRENT_TABLE = source_table
            _target.tapclient_connector.CURRENT_TABLE = target_table
            source_create = _source.tapclient_connector.generate_create(
                gen_data
            )

            with allure.step('kafka mq 写入数据为了全量'):
                delete_create_time = 2
                # Drain/drop queued messages before writing the full-sync data.
                _source.tapclient_connector.timeout = delete_create_time
                _source.tapclient_connector.drop(source_table)
                time.sleep(delete_create_time * 2)
                logger.info('mq kafka 全量数据插入')
                _source.tapclient_connector.exec(source_create)
            # Expected stats for the initial phase.
            _stats = operate.get('stats_check').get('initial')
            _stats_initial = _stats.get('insert')
            self.drop_target_table(_target)
            with allure.step('创建任务'):
                task_record = self.create_data_copy_development(data)
            # Wait for completion, then verify the initial insert stats.
            with allure.step('任务状态等待'):
                self.scheduled_task_wait_it_complete(task_name)
            self.func_wait(
                self.get_task_stats_insert,
                self.get_task_status,
                task_name,
                _stats_initial
            )
            # Verify the generated record arrived at the target.
            self.tap_check_mq_kafka(
                [gen_data],
                target_info,
                target_table
            )
        except TaskError as e:
            # Task-level failure: attach diagnostics, then still compare data.
            self.exception_attach(task_name, e)
            self.tap_check(
                source_info,
                source_table,
                target_info,
                target_table
            )
        except Exception as e:
            # Also covers AssertionError from the checks above; the former
            # separate AssertionError branch was byte-identical to this one.
            self.exception_attach(task_name, e)
        finally:
            self.release_table(record_id)
            # data is None when table locking itself failed; calling
            # data.get() then would mask the original error with an
            # AttributeError raised from finally.
            if data is not None:
                self.generate_task_url(task_name, data.get('task_type'))
                logger.info(self.task_url)
            try:
                _source.tapclient_connector.close()
                _target.tapclient_connector.close()
                self.task_log_all(task_record.get('id'))
            except Exception:
                # Best-effort cleanup; failures here are non-fatal.
                pass

    def TASK_TEMPLATE_CDC_Kafka_And_MQ(self, parallel_data, gen_data):
        """
        Run a CDC task from a Kafka/MQ source and verify the
        insert/update/delete phases against the target.

        :param parallel_data: descriptor used to lock an available table
        :param gen_data: column/value mapping used to generate the full-sync
            message written to the queue before the task starts
        :return: None
        """
        data = None
        task_name = None
        # Random wait to de-synchronize parallel workers.
        self.random_wait()
        # Parallel-case initialization: lock one available table.
        with allure.step('获取可用表'):
            data = self.lock_and_get_table(parallel_data)
        record_id = data.get('id')

        # Operate info.
        operate = data.pop('operate')
        operate_source = operate.get('source')
        operate_target = operate.get('target')
        source_info = operate_source.get('source_info')
        target_info = operate_target.get('target_info')

        # DDL and DML column mappings.
        source_table = operate_source.get('table')
        source_insert_column = operate_source.get('insert')
        source_update_column = operate_source.get('update')
        source_where_column = operate_source.get('where')
        target_table = operate_target.get('table')

        # Make the task name unique per locked source table.
        data['task_config']['config']['name'] += source_table
        # Update the redis cache key prefix to match the table.
        if data.get('task_config').get('config').get('redis'):
            data['task_config']['config']['redis']['nodeConfig']['cachePrefix'] = source_table
        with allure.step('任务名获取'):
            task_name = data.get('task_config').get('config').get('name')
        # Connector setup.
        with allure.step('源数据库初始化'):
            _source = self.init_connector(source_info)
        with allure.step('目标数据库初始化'):
            _target = self.init_connector(target_info)
        _source.tapclient_connector.CURRENT_TABLE = source_table
        _target.tapclient_connector.CURRENT_TABLE = target_table
        source_create = _source.tapclient_connector.generate_create(
            gen_data
        )
        source_insert = _source.tapclient_connector.generate_insert(
            source_insert_column
        )
        source_update = _source.tapclient_connector.generate_update(
            source_update_column,
            source_where_column
        )
        source_delete = _source.tapclient_connector.generate_delete(
            source_where_column
        )
        with allure.step('kafka mq 写入数据为了全量'):
            delete_create_time = 2
            # Drain/drop queued messages before writing the full-sync data.
            _source.tapclient_connector.timeout = delete_create_time
            _source.tapclient_connector.drop(source_table)
            time.sleep(delete_create_time * 2)
            logger.info('mq kafka 全量数据插入')
            _source.tapclient_connector.exec(source_create)
        # Expected stats; only the insert expectation is defined here.
        _stats = operate.get('stats_check').get('cdc')
        _stats_insert = _stats.get('insert')
        # Reset the target table before the run.
        self.drop_target_table(_target)
        try:
            with allure.step('创建任务'):
                self.create_data_copy_development(data)
            with allure.step('等待任务运行'):
                self.scheduled_task_wait_it_running(task_name)
            # Give the incremental task time to settle before writing.
            logger.info('等待一段时间')
            time.sleep(cdc_wait / 6)
            # CDC phase: insert.
            logger.info('cdc insert and check')
            _source.tapclient_connector.exec(source_insert)
            self.func_wait(
                self.get_task_stats_insert,
                self.get_task_status,
                task_name,
                _stats_insert
            )
            # Insert check.
            source_check = [source_insert_column]
            self.tap_check_mq_kafka(
                source_check,
                target_info,
                target_table
            )
            # CDC phase: update.
            logger.info('cdc update and check')
            _source.tapclient_connector.exec(source_update)
            # NOTE(review): the update and delete waits below reuse
            # _stats_insert as the expectation — confirm the stats config
            # really intends this rather than 'update'/'delete' keys.
            self.func_wait(
                self.get_task_stats_update,
                self.get_task_status,
                task_name,
                _stats_insert
            )
            # Updated record = inserted columns overlaid with update columns.
            update_check = source_insert_column.copy()
            update_check.update(source_update_column)
            self.tap_check_mq_kafka(
                [update_check],
                target_info,
                target_table
            )
            # CDC phase: delete.
            logger.info('cdc delete and check')
            _source.tapclient_connector.exec(source_delete)
            self.func_wait(
                self.get_task_stats_delete,
                self.get_task_status,
                task_name,
                _stats_insert
            )
            self.tap_check_mq_kafka(
                source_check,
                target_info,
                target_table
            )
        except Exception as e:
            logger.info(e)
            # Fail the test; include the original error in the message so
            # it is not lost behind a bare AssertionError.
            assert False, e
        finally:
            # Release the locked table (was missing here; the lock leaked
            # on every run of this template).
            self.release_table(record_id)
            if self.task_is_running(task_name):
                self.stop_task_wait_it_paused(task_name)
            self.generate_task_url(task_name, data.get('task_type'))
            try:
                _source.tapclient_connector.close()
                _target.tapclient_connector.close()
            except Exception:
                # Best-effort cleanup; failures here are non-fatal.
                pass

    def TASK_TEMPLATE_INITIAL_CDC_Kafka_And_MQ(self, parallel_data, gen_data):
        """
        Run an initial+CDC task from a Kafka/MQ source: verify the full-sync
        phase, then the incremental insert phase.

        :param parallel_data: descriptor used to lock an available table
        :param gen_data: column/value mapping used to generate the full-sync
            message and to verify the target after the initial phase
        :return: None
        """
        data = None
        # Random wait to de-synchronize parallel workers.
        self.random_wait()
        # Parallel-case initialization: lock one available table.
        with allure.step('获取可用表'):
            data = self.lock_and_get_table(parallel_data)
        record_id = data.get('id')

        # Operate info.
        operate = data.pop('operate')
        operate_source = operate.get('source')
        operate_target = operate.get('target')
        source_info = operate_source.get('source_info')
        target_info = operate_target.get('target_info')
        # Connector setup.
        _source = self.init_connector(source_info)
        _target = self.init_connector(target_info)
        # DDL and DML column mappings (update/where columns are reserved for
        # the disabled update/delete verification — see TODO below).
        source_table = operate_source.get('table')
        source_insert_column = operate_source.get('insert')
        source_update_column = operate_source.get('update')
        source_where_column = operate_source.get('where')
        target_table = operate_target.get('table')

        # Make the task name unique per locked source table.
        data['task_config']['config']['name'] += source_table
        # Update the redis cache key prefix to match the table.
        if data.get('task_config').get('config').get('redis'):
            data['task_config']['config']['redis']['nodeConfig']['cachePrefix'] = source_table
        task_name = data.get('task_config').get('config').get('name')

        _source.tapclient_connector.CURRENT_TABLE = source_table
        _target.tapclient_connector.CURRENT_TABLE = target_table
        source_insert = _source.tapclient_connector.generate_insert(
            source_insert_column
        )
        source_create = _source.tapclient_connector.generate_create(
            gen_data
        )
        with allure.step('kafka mq 写入数据为了全量'):
            delete_create_time = 2
            # Drain/drop queued messages before writing the full-sync data.
            _source.tapclient_connector.timeout = delete_create_time
            _source.tapclient_connector.drop(source_table)
            time.sleep(delete_create_time * 2)
            logger.info('mq kafka 全量数据插入')
            _source.tapclient_connector.exec(source_create)

        # Expected stats for each phase.
        _stats = operate.get('stats_check').get('initial+cdc')
        _stats_initial = _stats.get('initial')
        _stats_insert = _stats.get('insert')
        # Reset the target table before the run.
        self.drop_target_table(_target)
        try:
            with allure.step('创建任务'):
                self.create_data_copy_development(data)
            with allure.step('等待任务运行'):
                self.scheduled_task_wait_it_running(task_name)
            # Initial (full-sync) phase check.
            logger.info('initial check')
            self.func_wait(
                self.get_task_stats_insert,
                self.get_task_status,
                task_name,
                _stats_initial
            )
            source_check = [gen_data]
            self.tap_check_mq_kafka(
                source_check,
                target_info,
                target_table
            )
            # CDC phase: insert.
            logger.info('cdc insert and check')
            _source.tapclient_connector.exec(source_insert)
            self.func_wait(
                self.get_task_stats_insert,
                self.get_task_status,
                task_name,
                _stats_insert
            )
            # Target should now hold the full-sync record plus the insert.
            insert_check = source_check.copy()
            insert_check.append(source_insert_column)
            self.tap_check_mq_kafka(
                insert_check,
                target_info,
                target_table
            )
            # TODO: the update/delete CDC verification that used to follow
            # here is disabled; restore it once the expected update/delete
            # stats are defined in the stats config.
        except Exception as e:
            logger.info(e)
            # Fail the test; include the original error in the message so
            # it is not lost behind a bare AssertionError.
            assert False, e
        finally:
            # Release the locked table (was missing here; the lock leaked
            # on every run of this template).
            self.release_table(record_id)
            if self.task_is_running(task_name):
                self.stop_task_wait_it_paused(task_name)
            self.generate_task_url(task_name, data.get('task_type'))
            try:
                _source.tapclient_connector.close()
                _target.tapclient_connector.close()
            except Exception:
                # Best-effort cleanup; failures here are non-fatal.
                pass