#!/usr/bin/env python
# -*- coding:utf-8 _*-
""" 
@author:robot
@file: ct_operate_node02.py 
@version:
@time: 2022/06/08 
"""
from TapClientCore.T07_CaseTemplate.v2.CT03_task.ct_task import CTTask, allure, logger, check_wait, time


class CTTaskNode07AGG(CTTask):
    """Case template for tasks that route data through an aggregation node.

    Provides initial, CDC, and initial+CDC case bodies that create a copy
    task containing the aggregation processing node, then verify the target
    table against a locally recomputed aggregation result.
    """

    # Aggregation configs (list of {aggExpression, aggFunction, groupByExpression});
    # populated by AGG() from the case's node_config.
    agg = None
    # Display name of the processing node under test (Chinese: "automation-aggregation").
    name = '自动化-聚合'
    # Expected target column set; populated by AGG() from node_config['column'].
    agg_column_set = None

    # Node information update.
    def agg_update(self, task_name, task_id):
        """Fetch the agg node's upstream schema and push a field-name update.

        :param task_name: name of the task whose agg node is updated
        :param task_id: id used to look up the task DAG
        """
        # Give the server time to materialize node schemas before querying.
        time.sleep(check_wait)
        with allure.step(f'获取node schema 并进行{self.name}节点更新'):
            # The agg node's input schema comes from the source of the second DAG edge.
            source_node_id = self.TMF_TASK.query_task_by_id(task_id).get('data').get('dag').get('edges')[1].get(
                'source')
            fields = self.get_field_by_node_id(source_node_id)
            field_update = self.agg_meta(fields)
        with allure.step('agg 节点更新'):
            field_rename = {
                'fieldNames': field_update
            }
            # Response intentionally discarded; inspect here when debugging.
            self.update_task_op_node(task_name, self.name, field_rename)

    # agg修改meta信息修改
    @staticmethod
    def agg_meta(fields):
        tmp = []
        for old in fields:
            tmp.append(
                [
                    {
                        'isPrimaryKey': True,
                        'label': one.get('field_name'),
                        'value': one.get('field_name'),
                    } if one.get('primary_key_position') else
                    {
                        'isPrimaryKey': False,
                        'label': one.get('field_name'),
                        'value': one.get('field_name'),
                    }
                    for one in [old]
                ][0]
            )
        return tmp

    # merge sort
    @staticmethod
    def agg_data_sort(data, column):
        return data

    # agg data
    def agg_func(self, data):
        new_data = []
        for one_agg in self.agg:
            agge = one_agg.get('aggExpression')
            aggf = one_agg.get('aggFunction')
            group = one_agg.get('groupByExpression')
            if aggf == 'COUNT':
                collect_data = {}
                for one_data in data:
                    tmp_key = ''
                    # ['title']
                    # { title-:n, insert-:n }
                    # [ 'pk', 'title' ]
                    # { 1-title-:n, 2-title-:n }
                    for one_group in group:
                        tmp_key += f'{one_data.get(one_group)}-'
                    if tmp_key in collect_data:
                        collect_data.update({tmp_key: collect_data.get(tmp_key) + 1})
                    else:
                        collect_data.update({tmp_key: 1})

                    def read_collect(xycolumn):
                        x = xycolumn[0]
                        y = xycolumn[1]
                        _group = xycolumn[2]
                        x = x.split('-')
                        x.remove('')
                        _data = {_group[i]: x[i] for i in range(len(_group))}
                        _data.update({aggf: str(y)})
                        return _data

                    new_data = list(map(read_collect, [(k, v, group) for k, v in collect_data.items()]))
                return new_data
            elif aggf == 'AVG':
                collect_data = {}
                for one_data in data:
                    tmp_key = ''
                    # ['title']
                    # { title-:n, insert-:n }
                    # [ 'pk', 'title' ]
                    # { 1-title-:n, 2-title-:n }
                    for one_group in group:
                        tmp_key += f'{one_data.get(one_group)}-'
                    # 获取sum
                    agge_sum = one_data.get(agge)

                    if tmp_key in collect_data:
                        collect_data.update({tmp_key: [
                            collect_data.get(tmp_key)[0] + agge_sum,
                            collect_data.get(tmp_key)[1] + 1
                        ]})
                    else:
                        collect_data.update({tmp_key: [agge_sum, 1]})

                    def read_collect(xycolumn):
                        x = xycolumn[0]
                        y = xycolumn[1][0]
                        y_count = xycolumn[1][1]
                        _group = xycolumn[2]
                        x = x.split('-')
                        x.remove('')
                        _data = {_group[i]: x[i] for i in range(len(_group))}
                        _data.update({aggf: '{0:.4f}'.format(y / y_count)})
                        return _data

                    new_data = list(map(read_collect, [(k, v, group) for k, v in collect_data.items()]))
                return new_data
            elif aggf == 'MAX':
                collect_data = {}
                for one_data in data:
                    tmp_key = ''
                    # ['title']
                    # { title-:n, insert-:n }
                    # [ 'pk', 'title' ]
                    # { 1-title-:n, 2-title-:n }
                    for one_group in group:
                        tmp_key += f'{one_data.get(one_group)}-'
                    # 获取max
                    agge_max = one_data.get(agge)

                    if tmp_key in collect_data:
                        if agge_max < collect_data.get(tmp_key):
                            pass
                        else:
                            collect_data.update({tmp_key: agge_max})
                    else:
                        collect_data.update({tmp_key: agge_max})

                    def read_collect(xycolumn):
                        x = xycolumn[0]
                        y = xycolumn[1]
                        _group = xycolumn[2]
                        x = x.split('-')
                        x.remove('')
                        _data = {_group[i]: x[i] for i in range(len(_group))}
                        _data.update({aggf: str(y)})
                        return _data

                    new_data = list(map(read_collect, [(k, v, group) for k, v in collect_data.items()]))
                return new_data
            elif aggf == 'MIN':
                collect_data = {}
                for one_data in data:
                    tmp_key = ''
                    # ['title']
                    # { title-:n, insert-:n }
                    # [ 'pk', 'title' ]
                    # { 1-title-:n, 2-title-:n }
                    for one_group in group:
                        tmp_key += f'{one_data.get(one_group)}-'
                    # 获取min
                    agge_min = one_data.get(agge)

                    if tmp_key in collect_data:
                        if agge_min > collect_data.get(tmp_key):
                            pass
                        else:
                            collect_data.update({tmp_key: agge_min})
                    else:
                        collect_data.update({tmp_key: agge_min})

                    def read_collect(xycolumn):
                        x = xycolumn[0]
                        y = xycolumn[1]
                        _group = xycolumn[2]
                        x = x.split('-')
                        x.remove('')
                        _data = {_group[i]: x[i] for i in range(len(_group))}
                        _data.update({aggf: str(y)})
                        return _data

                    new_data = list(map(read_collect, [(k, v, group) for k, v in collect_data.items()]))
                return new_data
            elif aggf == 'SUM':
                collect_data = {}
                for one_data in data:
                    tmp_key = ''
                    # ['title']
                    # { title-:n, insert-:n }
                    # [ 'pk', 'title' ]
                    # { 1-title-:n, 2-title-:n }
                    for one_group in group:
                        tmp_key += f'{one_data.get(one_group)}-'
                    # 获取sum
                    agge_sum = one_data.get(agge)

                    if tmp_key in collect_data:
                        collect_data.update({tmp_key: collect_data.get(tmp_key) + agge_sum})
                    else:
                        collect_data.update({tmp_key: agge_sum})

                    def read_collect(xycolumn):
                        x = xycolumn[0]
                        y = xycolumn[1]
                        _group = xycolumn[2]
                        x = x.split('-')
                        x.remove('')
                        _data = {_group[i]: x[i] for i in range(len(_group))}
                        _data.update({aggf: str(y)})
                        return _data

                    new_data = list(map(read_collect, [(k, v, group) for k, v in collect_data.items()]))
                return new_data
        return new_data

    # Column hook for column_agg_check: ignore the live source columns, use config.
    def agg_column(self, data):
        """Log the observed source columns and return the configured column set."""
        logger.info(f'source column is {data}')
        expected = self.agg_column_set
        return expected

    # 字段类型修改配置信息获取
    def AGG(self, data):
        node_config = data.pop('node_config')
        self.agg = node_config.get('agg')
        self.agg_column_set = node_config.get('column')
        data['task_config']['node'] = node_config
        return data

    # Initial-load (full sync) case.
    def TASK_AGG_INITIAL_CHANGE(self, parallel_data):
        """Run the initial-load aggregation task in parallel and verify it.

        Locks a source/target table pair, creates the copy task containing
        the agg node, waits for completion, then checks insert stats, target
        columns, and the merged data against a local aggregation.

        :param parallel_data: parallel case pool descriptor for lock_and_get_table
        """
        data = None
        record_id = None
        task_name = None
        _source = None
        _target = None
        # Stagger parallel workers.
        self.random_wait()
        try:
            # Parallel case initialization: lock a free table record.
            with allure.step('获取可用表'):
                data = self.lock_and_get_table(parallel_data)
            record_id = data.get('id')

            operate = data.pop('operate')
            operate_source = operate.get('source')
            source_info = operate_source.get('source_info')
            source_table = operate_source.get('table')
            operate_target = operate.get('target')
            target_info = operate_target.get('target_info')
            target_table = operate_target.get('table')

            # Make the task name unique per source table.
            data['task_config']['config']['name'] += source_table
            with allure.step('任务名获取'):
                task_name = data.get('task_config').get('config').get('name')

            data = self.AGG(data)

            # Connectors
            with allure.step('源数据库初始化'):
                _source = self.init_connector(source_info)
            with allure.step('目标数据库初始化'):
                _target = self.init_connector(target_info)
            _source.tapclient_connector.CURRENT_TABLE = source_table
            _target.tapclient_connector.CURRENT_TABLE = target_table

            source_where_column = operate_source.get('where')
            source_delete = _source.tapclient_connector.generate_delete(
                source_where_column
            )
            with allure.step('执行前删除要插入的数据'):
                _source.tapclient_connector.exec(source_delete)
            # Expected stats for the initial phase.
            _stats = operate.get('stats_check').get('initial')
            _stats_initial = _stats.get('insert')
            self.drop_target_table(_target)
            with allure.step('创建任务'):
                task_id = self.create_data_copy_development(data).get('id')

            # The agg node needs no field-rename update for the initial-only case.
            # self.agg_update(task_name, task_id)

            # Wait for completion, then verify the initial insert stats.
            with allure.step('任务状态等待'):
                self.scheduled_task_wait_it_complete(task_name)
            self.func_wait(
                self.get_task_stats_insert,
                self.get_task_status,
                task_name,
                _stats_initial
            )
            column = self.column_agg_check(
                source_info,
                source_table,
                target_info,
                target_table,
                self.agg_column
            )
            # Compare the locally aggregated source data with the target.
            self.tap_merge_check(
                source_info,
                source_table,
                target_info,
                target_table,
                self.agg_func,
                self.agg_data_sort,
                column
            )
        finally:
            self.release_table(record_id)
            if data is not None:
                # Only build the task URL once case data was actually acquired;
                # otherwise data is None and this would mask the real failure.
                self.generate_task_url(task_name, data.get('task_type'))
                logger.info(self.task_url)
            # Best-effort connection cleanup.
            for conn in (_source, _target):
                try:
                    if conn is not None:
                        conn.tapclient_connector.close()
                except Exception:
                    pass

    # CDC-only (incremental) case.
    def TASK_AGG_CDC_CHANGE(self, parallel_data):
        """Run the CDC aggregation task and verify insert/update/delete phases.

        :param parallel_data: parallel case pool descriptor for lock_and_get_table
        """
        data = None
        record_id = None
        source_delete = None
        task_name = None
        _source = None
        _target = None
        # Stagger parallel workers.
        self.random_wait()
        try:
            # Parallel case initialization: lock a free table record.
            with allure.step('获取可用表'):
                data = self.lock_and_get_table(parallel_data)
            record_id = data.get('id')

            # Operate info
            operate = data.pop('operate')
            operate_source = operate.get('source')
            operate_target = operate.get('target')
            source_info = operate_source.get('source_info')
            target_info = operate_target.get('target_info')

            # DDL and DML inputs
            source_table = operate_source.get('table')
            source_insert_column = operate_source.get('insert')
            source_update_column = operate_source.get('update')
            source_where_column = operate_source.get('where')
            target_table = operate_target.get('table')

            # Make the task name unique per source table.
            data['task_config']['config']['name'] += source_table
            with allure.step('任务名获取'):
                task_name = data.get('task_config').get('config').get('name')

            data = self.AGG(data)

            # Connectors
            with allure.step('源数据库初始化'):
                _source = self.init_connector(source_info)
            with allure.step('目标数据库初始化'):
                _target = self.init_connector(target_info)
            _source.tapclient_connector.CURRENT_TABLE = source_table
            _target.tapclient_connector.CURRENT_TABLE = target_table
            source_insert = _source.tapclient_connector.generate_insert(
                source_insert_column
            )
            source_update = _source.tapclient_connector.generate_update(
                source_update_column,
                source_where_column
            )
            source_delete = _source.tapclient_connector.generate_delete(
                source_where_column
            )

            # Expected stats per CDC phase.
            _stats = operate.get('stats_check').get('cdc')
            _stats_insert = _stats.get('insert')
            _stats_update = _stats.get('update')
            _stats_delete = _stats.get('delete')
            # Drop the target so the task recreates it.
            self.drop_target_table(_target)
            with allure.step('执行前删除要插入的数据'):
                _source.tapclient_connector.exec(source_delete)
            with allure.step('创建任务'):
                task_id = self.create_data_copy_development(data).get('id')

            # Push the agg node's field-name update before starting.
            self.agg_update(task_name, task_id)

            with allure.step('等待任务运行'):
                self.scheduled_task_wait_it_running(task_name)
            # Give CDC capture time to settle before generating events.
            logger.info('等待一段时间')
            time.sleep(180)
            # insert
            logger.info('cdc insert and check')
            _source.tapclient_connector.exec(source_insert)
            self.func_wait(
                self.get_task_stats_insert,
                self.get_task_status,
                task_name,
                _stats_insert
            )
            column = self.column_agg_check(
                source_info,
                source_table,
                target_info,
                target_table,
                self.agg_column
            )
            # check
            self.tap_merge_cdc_check(
                source_info,
                source_table,
                source_where_column,
                target_info,
                target_table,
                self.agg_func,
                self.agg_data_sort,
                column
            )
            # update
            _source.tapclient_connector.exec(source_update)
            self.func_wait(
                self.get_task_stats_insert,
                self.get_task_status,
                task_name,
                _stats_update
            )
            # check
            self.tap_merge_cdc_check(
                source_info,
                source_table,
                source_where_column,
                target_info,
                target_table,
                self.agg_func,
                self.agg_data_sort,
                column
            )
            # delete
            _source.tapclient_connector.exec(source_delete)
            self.func_wait(
                self.get_task_stats_delete,
                self.get_task_status,
                task_name,
                _stats_delete
            )
            # check
            self.tap_merge_cdc_check(
                source_info,
                source_table,
                source_where_column,
                target_info,
                target_table,
                self.agg_func,
                self.agg_data_sort,
                column
            )
        except Exception as e:
            logger.info(e)
            # Best-effort rollback of inserted rows before failing the case.
            if _source is not None and source_delete is not None:
                try:
                    _source.tapclient_connector.exec(source_delete)
                except Exception:
                    pass
            # Re-raise to keep the original traceback (table is released in finally).
            raise
        finally:
            self.release_table(record_id)
            if task_name is not None and self.task_is_running(task_name):
                self.stop_task_wait_it_paused(task_name)
            if data is not None:
                self.generate_task_url(task_name, data.get('task_type'))
            # Best-effort connection cleanup.
            for conn in (_source, _target):
                try:
                    if conn is not None:
                        conn.tapclient_connector.close()
                except Exception:
                    pass

    # Initial + CDC case.
    def TASK_AGG_INITIAL_CDC_CHANGE(self, parallel_data):
        """Run the initial+CDC aggregation task and verify every phase.

        Checks the initial load first, then the CDC insert, update and delete
        phases, each against a locally recomputed aggregation.

        :param parallel_data: parallel case pool descriptor for lock_and_get_table
        """
        data = None
        source_delete = None
        record_id = None
        _source = None
        _target = None
        task_name = None
        try:
            # Stagger parallel workers.
            self.random_wait()
            # Parallel case initialization: lock a free table record.
            with allure.step('获取可用表'):
                data = self.lock_and_get_table(parallel_data)
            record_id = data.get('id')

            # Operate info
            operate = data.pop('operate')
            operate_source = operate.get('source')
            operate_target = operate.get('target')
            source_info = operate_source.get('source_info')
            target_info = operate_target.get('target_info')
            # Connectors
            _source = self.init_connector(source_info)
            _target = self.init_connector(target_info)
            # DDL and DML inputs
            source_table = operate_source.get('table')
            source_insert_column = operate_source.get('insert')
            source_update_column = operate_source.get('update')
            source_where_column = operate_source.get('where')
            target_table = operate_target.get('table')

            # Make the task name unique per source table.
            data['task_config']['config']['name'] += source_table
            task_name = data.get('task_config').get('config').get('name')

            _source.tapclient_connector.CURRENT_TABLE = source_table
            _target.tapclient_connector.CURRENT_TABLE = target_table
            source_insert = _source.tapclient_connector.generate_insert(
                source_insert_column
            )
            source_update = _source.tapclient_connector.generate_update(
                source_update_column,
                source_where_column
            )
            source_delete = _source.tapclient_connector.generate_delete(
                source_where_column
            )

            data = self.AGG(data)

            # Expected stats per phase.
            _stats = operate.get('stats_check').get('initial+cdc')
            _stats_initial = _stats.get('initial')
            _stats_insert = _stats.get('insert')
            _stats_update = _stats.get('update')
            _stats_delete = _stats.get('delete')
            # Drop the target so the task recreates it.
            self.drop_target_table(_target)
            with allure.step('执行前删除要插入的数据'):
                _source.tapclient_connector.exec(source_delete)
            with allure.step('创建任务'):
                task_id = self.create_data_copy_development(data).get('id')

            # Push the agg node's field-name update before starting.
            self.agg_update(task_name, task_id)

            with allure.step('等待任务运行'):
                self.scheduled_task_wait_it_running(task_name)
            # Initial-phase verification.
            logger.info('initial check')
            self.func_wait(
                self.get_task_stats_insert,
                self.get_task_status,
                task_name,
                _stats_initial
            )
            column = self.column_agg_check(
                source_info,
                source_table,
                target_info,
                target_table,
                self.agg_column
            )
            # check
            self.tap_merge_check(
                source_info,
                source_table,
                target_info,
                target_table,
                self.agg_func,
                self.agg_data_sort,
                column
            )
            # CDC-phase verification: insert.
            logger.info('cdc insert and check')
            _source.tapclient_connector.exec(source_insert)
            self.func_wait(
                self.get_task_stats_insert,
                self.get_task_status,
                task_name,
                _stats_insert
            )
            # check
            self.tap_merge_check(
                source_info,
                source_table,
                target_info,
                target_table,
                self.agg_func,
                self.agg_data_sort,
                column
            )
            # update
            _source.tapclient_connector.exec(source_update)
            self.func_wait(
                self.get_task_stats_insert,
                self.get_task_status,
                task_name,
                _stats_update
            )
            # check
            self.tap_merge_check(
                source_info,
                source_table,
                target_info,
                target_table,
                self.agg_func,
                self.agg_data_sort,
                column
            )
            # delete
            _source.tapclient_connector.exec(source_delete)
            self.func_wait(
                self.get_task_stats_delete,
                self.get_task_status,
                task_name,
                _stats_delete
            )
            # check
            self.tap_merge_check(
                source_info,
                source_table,
                target_info,
                target_table,
                self.agg_func,
                self.agg_data_sort,
                column
            )
        except Exception as e:
            logger.info(e)
            # Best-effort rollback of inserted rows before failing the case.
            if _source is not None and source_delete is not None:
                try:
                    _source.tapclient_connector.exec(source_delete)
                except Exception:
                    pass
            # Re-raise to keep the original traceback (table is released in finally).
            raise
        finally:
            self.release_table(record_id)
            if task_name is not None and self.task_is_running(task_name):
                self.stop_task_wait_it_paused(task_name)
            if data is not None:
                self.generate_task_url(task_name, data.get('task_type'))
            # Best-effort connection cleanup.
            for conn in (_source, _target):
                try:
                    if conn is not None:
                        conn.tapclient_connector.close()
                except Exception:
                    pass
