#!/usr/bin/env python
# -*- coding: utf-8 -*-
""" 
@author:robot
@file: ctv2_dataflow.py 
@version:
@time: 2022/03/14 
"""
import random
import time
from pprint import pprint

import allure
from TapException.PARALLEL import *
from TapClientCore.T01_init_conf.v2_init.init_config import timeout_config, MFA, parallel_record_collection
from TapClientConnector import TapClientConnector
from TapClientCore.T06_Tapdata_feature.v2.TPF02_dataflows.tpf_dataflow_v2 import DFSTPFDataFlow, logger


class CTV2DataFlow(DFSTPFDataFlow):
    """Dataflow test templates for DFS data-copy tasks.

    Covers three run modes — initial only, cdc only, and initial+cdc —
    plus helpers for claiming/releasing parallel test tables and for
    polling dataflow statistics until they match the expected counts.
    """

    @staticmethod
    def random_wait():
        """Sleep a random 1-10 seconds to stagger parallel test workers."""
        time.sleep(random.randint(1, 10))

    @staticmethod
    def lock_and_get_table(data):
        """Claim an idle parallel-record table and patch it into the case config.

        Queries for a record with ``is_use == '0'`` matching the case's
        ``parallel_use_record_name`` and the source db name, rewrites the
        dataflow/source/target table names to the claimed table, flags the
        record as in use and stores its id in ``data['id']``.

        :param data: test case dict; mutated in place when a record is claimed
        :return: the (possibly mutated) data dict
        :raises NotCanUseRecord: when no idle record is available
        """
        # 获取数据源配置信息的name (name of the source datasource config)
        db_name = data.get('operate').get('source').get('source_info').get('name')
        parallel_use_record_name = data.get('parallel_use_record_name')
        if not parallel_use_record_name:
            logger.info('not parallel execute')
            return data
        parallel_record = MFA.query(
            parallel_record_collection,
            {
                'name': parallel_use_record_name,
                'is_use': '0',
                'db_name': db_name
            }
        )
        if not parallel_record:
            raise NotCanUseRecord
        _record = parallel_record[0]
        _id = _record.get('_id')
        _table = _record.get('table')
        data['dataflow_config']['relationship']['syncObjects']['objectNames'] = [_table]
        data['operate']['source']['table'] = _table
        data['operate']['target']['table'] = _table
        MFA.update(
            parallel_record_collection,
            {'_id': _id},
            {'$set': {'is_use': '1'}}
        )
        data['id'] = _id
        return data

    @staticmethod
    def release_table(_id):
        """Mark a previously claimed parallel record as idle; no-op for falsy ids."""
        if _id:
            MFA.update(
                parallel_record_collection,
                {'_id': _id},
                {'$set': {'is_use': '0'}}
            )
            logger.info(f'release {_id} record')

    @staticmethod
    def init_connector(db_info):
        """Build a database connector from a source/target info dict."""
        return TapClientConnector(db_info)

    @staticmethod
    def func_wait(func1, func2, args, compare, timeout=timeout_config):
        """Poll ``func1(args)`` every 5 seconds until it equals ``compare``.

        Fails immediately when ``func2(args)`` reports ``'error'`` or the
        polled stat overshoots ``compare``.

        :param func1: stat getter, called as ``func1(args)``
        :param func2: status getter, called as ``func2(args)``
        :param args: single argument forwarded to both getters
        :param compare: expected stat value
        :param timeout: maximum number of 5-second polls
        :raises AssertionError: on error status or stat overshoot
        :raises TimeoutError: when the stat never reaches ``compare``
        """
        logger.info(func1.__name__)
        for _ in range(timeout):
            time.sleep(5)
            result1 = func1(args)
            logger.info(f'RESULT is {result1} ')
            # raise (not `assert`) so the checks survive `python -O`
            if func2(args) == 'error':
                raise AssertionError(f'{func1.__name__}: dataflow status is error')
            if result1 > compare:
                raise AssertionError(
                    f'{func1.__name__}: stat {result1} overshot expected {compare}'
                )
            if result1 == compare:
                # stats 正确后等待时间 (settle time after the stat matches)
                time.sleep(5)
                return
        raise TimeoutError

    @staticmethod
    def drop_target_table(connector: TapClientConnector):
        """Drop the connector's CURRENT_TABLE on the target side, if it exists."""
        logger.info('开始进入目标表删除阶段')
        table = connector.tapclient_connector.CURRENT_TABLE
        for raw_table in connector.tapclient_connector.show_tables():
            logger.info(raw_table)
            if table == raw_table:
                logger.info(f'发现目标要删除的表{table}')
                drop_operate = connector.tapclient_connector.generate_drop(table)
                connector.tapclient_connector.exec(drop_operate)
                logger.info(f'删除目标要同步的表{table}')
                return
        logger.info(f'没有发现目标要删除的表{table}')

    def _attach_dataflow_url(self):
        """Attach a clickable dataflow-monitor link to the allure report, if known."""
        if self.dataflow_url:
            allure.attach(
                f'<a href="{self.dataflow_url}" target="_blank">点击此处跳转到dataflow监控界面</a>',
                '任务监控url', allure.attachment_type.HTML
            )

    @staticmethod
    def _prepare_dml(connector, operate_source):
        """Generate the (insert, update, delete) statements used by the cdc checks."""
        insert_sql = connector.tapclient_connector.generate_insert(
            operate_source.get('insert')
        )
        update_sql = connector.tapclient_connector.generate_update(
            operate_source.get('update'),
            operate_source.get('where')
        )
        delete_sql = connector.tapclient_connector.generate_delete(
            operate_source.get('where')
        )
        return insert_sql, update_sql, delete_sql

    def _run_cdc_checks(self, source, dataflow_name, insert_sql, update_sql,
                        delete_sql, stats):
        """Execute insert/update/delete on the source and wait for each cdc stat."""
        logger.info('cdc insert and check')
        source.tapclient_connector.exec(insert_sql)
        self.func_wait(
            self.get_dataflow_stats_insert,
            self.get_dataflows_status,
            dataflow_name,
            stats.get('insert')
        )
        source.tapclient_connector.exec(update_sql)
        self.func_wait(
            self.get_dataflow_stats_update,
            self.get_dataflows_status,
            dataflow_name,
            stats.get('update')
        )
        source.tapclient_connector.exec(delete_sql)
        self.func_wait(
            self.get_dataflow_stats_delete,
            self.get_dataflows_status,
            dataflow_name,
            stats.get('delete')
        )

    def DFS_DATA_COPY_TEMPLATE_INITIAL(self, data):
        """全量不校验任务运行 (initial-only run without data validation).

        :param data: test case dict (mutated: 'operate' is popped)
        :raises AssertionError: when the dataflow run or stat check fails
        """
        dataflow_name = data.get('dataflow_config').get('config').get('name')

        operate = data.pop('operate')
        operate_target = operate.get('target')

        # connector
        _target = self.init_connector(operate_target.get('target_info'))
        _target.tapclient_connector.CURRENT_TABLE = operate_target.get('table')
        # stats check
        _stats_initial = operate.get('stats_check').get('initial').get('initial')
        self.drop_target_table(_target)
        try:
            self.create_data_copy(data)
            self.dataflow_start_wait_stop(dataflow_name)
            # initial stats insert 校验
            self.func_wait(
                self.get_dataflow_stats_insert,
                self.get_dataflows_status,
                dataflow_name,
                _stats_initial
            )
        except Exception as e:
            logger.fatal(e)
            # keep the AssertionError contract of the original `assert False`,
            # but preserve the cause for diagnostics
            raise AssertionError(e) from e
        finally:
            self._attach_dataflow_url()
            # fix: the original leaked the target connection
            _target.tapclient_connector.close()

    def DFS_DATA_COPY_TEMPLATE_CDC(self, data):
        """增量不校验任务运行 (cdc-only run without data validation).

        :param data: test case dict (mutated: 'operate' is popped)
        :raises AssertionError: when the dataflow run or any stat check fails
        """
        dataflow_name = data.get('dataflow_config').get('config').get('name')
        # operate info
        operate = data.pop('operate')
        operate_source = operate.get('source')
        operate_target = operate.get('target')
        # connector
        _source = self.init_connector(operate_source.get('source_info'))
        _target = self.init_connector(operate_target.get('target_info'))
        _source.tapclient_connector.CURRENT_TABLE = operate_source.get('table')
        _target.tapclient_connector.CURRENT_TABLE = operate_target.get('table')
        # ddl and dml
        insert_sql, update_sql, delete_sql = self._prepare_dml(_source, operate_source)
        # stats check
        _stats = operate.get('stats_check').get('cdc')
        # target drop table
        self.drop_target_table(_target)
        try:
            self.create_data_copy(data)
            self.dataflow_start_wait_running(dataflow_name)
            # 增量任务等待时间 (grace period before cdc operations)
            time.sleep(20)
            # 增量阶段校验
            self._run_cdc_checks(_source, dataflow_name,
                                 insert_sql, update_sql, delete_sql, _stats)
        except Exception as e:
            logger.info(e)
            # best-effort cleanup of the inserted test row before failing
            _source.tapclient_connector.exec(delete_sql)
            raise AssertionError(e) from e
        finally:
            if self.dataflow_is_running(dataflow_name):
                self.dataflow_stopping_wait_stop(dataflow_name)
            self._attach_dataflow_url()
            _source.tapclient_connector.close()
            _target.tapclient_connector.close()

    def DFS_DATA_COPY_TEMPLATE_INITIAL_CDC(self, data):
        """全量+增量不校验任务 (initial + cdc run without data validation).

        :param data: test case dict (mutated: 'operate' is popped)
        :raises AssertionError: when the dataflow run or any stat check fails
        """
        dataflow_name = data.get('dataflow_config').get('config').get('name')
        # operate info
        operate = data.pop('operate')
        operate_source = operate.get('source')
        operate_target = operate.get('target')
        # connector
        _source = self.init_connector(operate_source.get('source_info'))
        _target = self.init_connector(operate_target.get('target_info'))
        _source.tapclient_connector.CURRENT_TABLE = operate_source.get('table')
        _target.tapclient_connector.CURRENT_TABLE = operate_target.get('table')
        # ddl and dml
        insert_sql, update_sql, delete_sql = self._prepare_dml(_source, operate_source)
        # stats check
        _stats = operate.get('stats_check').get('initial+cdc')
        # target drop table
        self.drop_target_table(_target)
        try:
            self.create_data_copy(data)
            self.dataflow_start_wait_running(dataflow_name)
            # 全量阶段校验 (initial phase check)
            logger.info('initial check')
            self.func_wait(
                self.get_dataflow_stats_insert,
                self.get_dataflows_status,
                dataflow_name,
                _stats.get('initial')
            )
            # 增量阶段校验 (cdc phase checks)
            self._run_cdc_checks(_source, dataflow_name,
                                 insert_sql, update_sql, delete_sql, _stats)
        except Exception as e:
            logger.info(e)
            # best-effort cleanup of the inserted test row before failing
            _source.tapclient_connector.exec(delete_sql)
            raise AssertionError(e) from e
        finally:
            if self.dataflow_is_running(dataflow_name):
                self.dataflow_stopping_wait_stop(dataflow_name)
            self._attach_dataflow_url()
            _source.tapclient_connector.close()
            _target.tapclient_connector.close()


class CTV2DataFlowParallel(CTV2DataFlow):
    """Parallel-safe variants of the data-copy templates.

    Each template first claims a dedicated table via ``lock_and_get_table``
    so concurrent workers never collide, and always releases the record in
    ``finally`` (the duplicate release in the original ``except`` branches
    was removed — ``finally`` runs in both paths).
    """

    def DFS_DATA_COPY_TEMPLATE_PARALLEL01(self, parallel_data):
        """全量不校验任务运行,并行执行任务 (initial-only run, parallel).

        :param parallel_data: test case dict (mutated: 'operate' is popped)
        :raises AssertionError: when the dataflow run or stat check fails
        """
        # 随机等待 (stagger concurrent workers)
        self.random_wait()
        # 并行案例初始化 (claim a dedicated table for this worker)
        data = self.lock_and_get_table(parallel_data)
        record_id = data.get('id')
        dataflow_name = data.get('dataflow_config').get('config').get('name')

        operate = data.pop('operate')
        operate_source = operate.get('source')
        operate_target = operate.get('target')

        # connector
        _source = self.init_connector(operate_source.get('source_info'))
        _target = self.init_connector(operate_target.get('target_info'))
        _target.tapclient_connector.CURRENT_TABLE = operate_target.get('table')

        source_delete = _source.tapclient_connector.generate_delete(
            operate_source.get('where')
        )
        # stats check — the expected initial row count lives under the
        # 'insert' key of the 'initial' section (matches the original)
        _stats_initial = operate.get('stats_check').get('initial').get('insert')
        self.drop_target_table(_target)
        try:
            self.create_data_copy(data)
            self.dataflow_start_wait_stop(dataflow_name)
            # initial stats insert 校验
            self.func_wait(
                self.get_dataflow_stats_insert,
                self.get_dataflows_status,
                dataflow_name,
                _stats_initial
            )
        except Exception as e:
            logger.fatal(e)
            # best-effort cleanup of source data before failing
            _source.tapclient_connector.exec(source_delete)
            raise AssertionError(e) from e
        finally:
            self.release_table(record_id)
            if self.dataflow_url:
                allure.attach(
                    f'<a href="{self.dataflow_url}" target="_blank">点击此处跳转到dataflow监控界面</a>',
                    '任务监控url', allure.attachment_type.HTML
                )
            # fix: the original leaked the source connection
            _source.tapclient_connector.close()
            _target.tapclient_connector.close()

    def DFS_DATA_COPY_TEMPLATE_PARALLEL02(self, parallel_data):
        """增量不校验任务运行,并行任务 (cdc-only run, parallel).

        :param parallel_data: test case dict (mutated: 'operate' is popped)
        :raises AssertionError: when the dataflow run or any stat check fails
        """
        # 随机等待 (stagger concurrent workers)
        self.random_wait()
        # 并行案例初始化 (claim a dedicated table for this worker)
        data = self.lock_and_get_table(parallel_data)
        record_id = data.get('id')

        dataflow_name = data.get('dataflow_config').get('config').get('name')
        # operate info
        operate = data.pop('operate')
        operate_source = operate.get('source')
        operate_target = operate.get('target')
        # connector
        _source = self.init_connector(operate_source.get('source_info'))
        _target = self.init_connector(operate_target.get('target_info'))
        _source.tapclient_connector.CURRENT_TABLE = operate_source.get('table')
        _target.tapclient_connector.CURRENT_TABLE = operate_target.get('table')
        # ddl and dml
        source_insert = _source.tapclient_connector.generate_insert(
            operate_source.get('insert')
        )
        source_update = _source.tapclient_connector.generate_update(
            operate_source.get('update'),
            operate_source.get('where')
        )
        source_delete = _source.tapclient_connector.generate_delete(
            operate_source.get('where')
        )

        # stats check
        _stats = operate.get('stats_check').get('cdc')
        # target drop table
        self.drop_target_table(_target)
        try:
            self.create_data_copy(data)
            self.dataflow_start_wait_running(dataflow_name)
            # 增量任务等待时间 (grace period before cdc operations)
            time.sleep(20)
            # 增量阶段校验
            logger.info('cdc insert and check')
            _source.tapclient_connector.exec(source_insert)
            self.func_wait(
                self.get_dataflow_stats_insert,
                self.get_dataflows_status,
                dataflow_name,
                _stats.get('insert')
            )
            _source.tapclient_connector.exec(source_update)
            self.func_wait(
                self.get_dataflow_stats_update,
                self.get_dataflows_status,
                dataflow_name,
                _stats.get('update')
            )
            _source.tapclient_connector.exec(source_delete)
            self.func_wait(
                self.get_dataflow_stats_delete,
                self.get_dataflows_status,
                dataflow_name,
                _stats.get('delete')
            )
        except Exception as e:
            logger.info(e)
            # best-effort cleanup of the inserted test row before failing
            _source.tapclient_connector.exec(source_delete)
            raise AssertionError(e) from e
        finally:
            self.release_table(record_id)
            if self.dataflow_is_running(dataflow_name):
                self.dataflow_stopping_wait_stop(dataflow_name)
            if self.dataflow_url:
                allure.attach(
                    f'<a href="{self.dataflow_url}" target="_blank">点击此处跳转到dataflow监控界面</a>',
                    '任务监控url', allure.attachment_type.HTML
                )
            _source.tapclient_connector.close()
            _target.tapclient_connector.close()

    def DFS_DATA_COPY_TEMPLATE_PARALLEL03(self, parallel_data):
        """全量+增量不校验任务运行,并行任务 (initial + cdc run, parallel).

        :param parallel_data: test case dict (mutated: 'operate' is popped)
        :raises AssertionError: when the dataflow run or any stat check fails
        """
        # 随机等待 (stagger concurrent workers)
        self.random_wait()
        # 并行案例初始化 (claim a dedicated table for this worker)
        data = self.lock_and_get_table(parallel_data)
        record_id = data.get('id')

        dataflow_name = data.get('dataflow_config').get('config').get('name')
        # operate info
        operate = data.pop('operate')
        operate_source = operate.get('source')
        operate_target = operate.get('target')
        # connector
        _source = self.init_connector(operate_source.get('source_info'))
        _target = self.init_connector(operate_target.get('target_info'))
        _source.tapclient_connector.CURRENT_TABLE = operate_source.get('table')
        _target.tapclient_connector.CURRENT_TABLE = operate_target.get('table')
        # ddl and dml
        source_insert = _source.tapclient_connector.generate_insert(
            operate_source.get('insert')
        )
        source_update = _source.tapclient_connector.generate_update(
            operate_source.get('update'),
            operate_source.get('where')
        )
        source_delete = _source.tapclient_connector.generate_delete(
            operate_source.get('where')
        )

        # stats check
        _stats = operate.get('stats_check').get('initial+cdc')
        # target drop table
        self.drop_target_table(_target)
        try:
            self.create_data_copy(data)
            self.dataflow_start_wait_running(dataflow_name)
            # 全量阶段校验 (initial phase check)
            logger.info('initial check')
            self.func_wait(
                self.get_dataflow_stats_insert,
                self.get_dataflows_status,
                dataflow_name,
                _stats.get('initial')
            )
            # 增量阶段校验 (cdc phase checks)
            logger.info('cdc insert and check')
            _source.tapclient_connector.exec(source_insert)
            self.func_wait(
                self.get_dataflow_stats_insert,
                self.get_dataflows_status,
                dataflow_name,
                _stats.get('insert')
            )
            _source.tapclient_connector.exec(source_update)
            self.func_wait(
                self.get_dataflow_stats_update,
                self.get_dataflows_status,
                dataflow_name,
                _stats.get('update')
            )
            _source.tapclient_connector.exec(source_delete)
            self.func_wait(
                self.get_dataflow_stats_delete,
                self.get_dataflows_status,
                dataflow_name,
                _stats.get('delete')
            )
        except Exception as e:
            logger.info(e)
            # best-effort cleanup of the inserted test row before failing
            _source.tapclient_connector.exec(source_delete)
            raise AssertionError(e) from e
        finally:
            self.release_table(record_id)
            if self.dataflow_is_running(dataflow_name):
                self.dataflow_stopping_wait_stop(dataflow_name)
            if self.dataflow_url:
                allure.attach(
                    f'<a href="{self.dataflow_url}" target="_blank">点击此处跳转到dataflow监控界面</a>',
                    '任务监控url', allure.attachment_type.HTML
                )
            _source.tapclient_connector.close()
            _target.tapclient_connector.close()
