#!/usr/bin/env python
# -*- coding: utf-8 -*-
""" 
@author:robot
@file: case01_base.py 
@version:
@time: 2022/03/15 
"""
import random
import time

from TapClientCaseData.CaseGenerateData.case_generator.case_data_generate_base import *
import db_info


class CGB:
    """Case-generation base.

    Loads the source/target/stats YAML fixtures once at class-creation time
    and assembles case-data dictionaries for dataflow, data-copy and
    data-development tasks.
    """
    CGB_SOURCE = path_joint(CASE_GENERATE_BASE, 'source')
    # target.yml: mapping of target db-class key -> db-info attribute name;
    # iterated by the _generate_case_data* methods to fan out per target.
    CGB_TARGET = read_yaml(path_joint(CASE_GENERATE_BASE, 'target.yml'))
    # stats_check.yml: per-case-type expected stats ('initial'/'cdc'/'initial+cdc').
    CGB_STATS = read_yaml(path_joint(CASE_GENERATE_BASE, 'stats_check.yml'))

    # Per-database source case definitions loaded from <CGB_SOURCE>/<file>.yml.
    CGBS_MYSQL = read_yaml(path_joint(CGB_SOURCE, '01mysql.yml'))
    CGBS_ORACLE = read_yaml(path_joint(CGB_SOURCE, '02oracle.yml'))
    CGBS_MONGODB = read_yaml(path_joint(CGB_SOURCE, '03mongodb.yml'))
    CGBS_DB2 = read_yaml(path_joint(CGB_SOURCE, '04db2.yml'))
    CGBS_POSTGRES = read_yaml(path_joint(CGB_SOURCE, '05postgres.yml'))
    CGBS_SQLSERVER = read_yaml(path_joint(CGB_SOURCE, '06sqlserver.yml'))
    CGBS_GBASE = read_yaml(path_joint(CGB_SOURCE, '07Gbase.yml'))
    CGBS_GAUSSDB = read_yaml(path_joint(CGB_SOURCE, '09gaussdb200.yml'))
    CGBS_ES = read_yaml(path_joint(CGB_SOURCE, '10es.yml'))
    CGBS_KAFKA = read_yaml(path_joint(CGB_SOURCE, '12kafka.yml'))
    CGBS_MARIADB = read_yaml(path_joint(CGB_SOURCE, '13mariadb.yml'))
    CGBS_GREENPLUM = read_yaml(path_joint(CGB_SOURCE, '20greenplum.yml'))
    CGBS_TIDB = read_yaml(path_joint(CGB_SOURCE, '21TiDB.yml'))
    CGBS_KUNDB = read_yaml(path_joint(CGB_SOURCE, '24kundb.yml'))

    # Lookup tables: type key -> db-info class (from the db_info module), and
    # DDL_DML_CLASS key -> class holding the SQL statement attributes.
    db_info_class = get_classes(db_info)
    dml_ddl_base = get_classes(base00_common)

    def __init__(self):
        # self.target drives the per-target fan-out in the case builders.
        self.target = self.CGB_TARGET
    def assemble(self, CGBS, caseType, _type):
        """Dispatch to the case builder matching *_type*.

        Returns the assembled case-data dict, or None when *_type* is not
        one of the known builder keys (matching the original fall-through).
        """
        builders = {
            'dataflow': self.dataflow_assemble,
            'task data copy': self.task_data_copy_assemble,
            'task data development': self.task_data_development_assemble,
            'task data copy2': self.task_data_copy_assemble2,
            'task data development2': self.task_data_development_assemble2,
            # dummy variants
            'task data copy dummy': self.task_data_copy_assemble_dummy,
            'task data development dummy': self.task_data_development_dummy,
            'task data copy 2dummy': self.task_data_copy_assemble_2dummy,
            'task data development 2dummy': self.task_data_development_assemble_2dummy,
        }
        builder = builders.get(_type)
        if builder is None:
            return None
        return builder(CGBS, caseType)

    def dataflow_assemble(self, CGBS, caseType):
        """Assemble a dataflow ('data clone') case from one source YAML definition."""
        # Table create/drop records plus the raw CDC SQL bundle for this case type.
        parts = self._generate_table_record(
            CGBS.get('type'),
            CGBS.get('db_info_name'),
            caseType,
            CGBS.get('GenerateTableRecord').get(caseType),
        )
        sql = parts.pop('sql')
        # Popping db_info first leaves sql with only the statement entries.
        case = self._generate_case_data(
            CGBS.get('type'),
            caseType,
            sql.pop('db_info'),
            sql,
            self._generate_stats(caseType),
        )
        case.update(parts)
        return case

    def task_data_copy_assemble(self, CGBS, caseType):
        """Assemble a 'data copy' task case from one source YAML definition."""
        # Table create/drop records plus the raw CDC SQL bundle for this case type.
        parts = self._generate_table_record(
            CGBS.get('type'),
            CGBS.get('db_info_name'),
            caseType,
            CGBS.get('GenerateTableRecord').get(caseType),
        )
        sql = parts.pop('sql')
        # Popping db_info first leaves sql with only the statement entries.
        case = self._generate_case_data_copy(
            CGBS.get('type'),
            caseType,
            sql.pop('db_info'),
            sql,
            self._generate_stats(caseType),
        )
        case.update(parts)
        return case

    def task_data_development_assemble(self, CGBS, caseType):
        """Assemble a 'data development' task case from one source YAML definition."""
        # Table create/drop records plus the raw CDC SQL bundle for this case type.
        parts = self._generate_table_record(
            CGBS.get('type'),
            CGBS.get('db_info_name'),
            caseType,
            CGBS.get('GenerateTableRecord').get(caseType),
        )
        sql = parts.pop('sql')
        # Popping db_info first leaves sql with only the statement entries.
        case = self._generate_case_data_development(
            CGBS.get('type'),
            caseType,
            sql.pop('db_info'),
            sql,
            self._generate_stats(caseType),
            CGBS.get('GenerateCaseData').get(caseType),
        )
        case.update(parts)
        return case

    def task_data_copy_assemble2(self, CGBS, caseType):
        """Assemble a 'data copy' case keyed only by target (variant 2)."""
        # Table create/drop records plus the raw CDC SQL bundle for this case type.
        parts = self._generate_table_record(
            CGBS.get('type'),
            CGBS.get('db_info_name'),
            caseType,
            CGBS.get('GenerateTableRecord').get(caseType),
        )
        sql = parts.pop('sql')
        # Popping db_info first leaves sql with only the statement entries.
        case = self._generate_case_data_copy2(
            CGBS.get('type'),
            caseType,
            sql.pop('db_info'),
            sql,
            self._generate_stats(caseType),
        )
        case.update(parts)
        return case

    def task_data_development_assemble2(self, CGBS, caseType):
        """Assemble a 'data development' case keyed only by target (variant 2)."""
        # Table create/drop records plus the raw CDC SQL bundle for this case type.
        parts = self._generate_table_record(
            CGBS.get('type'),
            CGBS.get('db_info_name'),
            caseType,
            CGBS.get('GenerateTableRecord').get(caseType),
        )
        sql = parts.pop('sql')
        # Popping db_info first leaves sql with only the statement entries.
        case = self._generate_case_data_development2(
            CGBS.get('type'),
            caseType,
            sql.pop('db_info'),
            sql,
            self._generate_stats(caseType),
            CGBS.get('GenerateCaseData').get(caseType),
        )
        case.update(parts)
        return case

    # dummy

    def task_data_copy_assemble_dummy(self, CGBS, caseType):
        """Assemble a 'data copy' case with no table generation (dummy variant)."""
        return self._generate_case_data_copy_dummy(
            CGBS.get('type'),
            caseType,
            CGBS.get('db_info_name'),
            self._generate_stats(caseType),
        )

    def task_data_development_dummy(self, CGBS, caseType):
        """Assemble a 'data development' case with no table generation (dummy variant)."""
        return self._generate_case_data_development_dummy(
            CGBS.get('type'),
            caseType,
            CGBS.get('db_info_name'),
            self._generate_stats(caseType),
            CGBS.get('GenerateCaseData').get(caseType),
        )

    def task_data_copy_assemble_2dummy(self, CGBS, caseType):
        """Assemble a 'data copy' case targeting only the Dummy sink."""
        # Table create/drop records plus the raw CDC SQL bundle for this case type.
        parts = self._generate_table_record(
            CGBS.get('type'),
            CGBS.get('db_info_name'),
            caseType,
            CGBS.get('GenerateTableRecord').get(caseType),
        )
        sql = parts.pop('sql')
        # Popping db_info first leaves sql with only the statement entries.
        case = self._generate_case_data_copy_2dummy(
            CGBS.get('type'),
            caseType,
            sql.pop('db_info'),
            sql,
            self._generate_stats(caseType),
        )
        case.update(parts)
        return case

    def task_data_development_assemble_2dummy(self, CGBS, caseType):
        """Assemble a 'data development' case targeting only the Dummy sink."""
        # Table create/drop records plus the raw CDC SQL bundle for this case type.
        parts = self._generate_table_record(
            CGBS.get('type'),
            CGBS.get('db_info_name'),
            caseType,
            CGBS.get('GenerateTableRecord').get(caseType),
        )
        sql = parts.pop('sql')
        # Popping db_info first leaves sql with only the statement entries.
        case = self._generate_case_data_development_2dummy(
            CGBS.get('type'),
            caseType,
            sql.pop('db_info'),
            sql,
            self._generate_stats(caseType),
            CGBS.get('GenerateCaseData').get(caseType),
        )
        case.update(parts)
        return case

    # Build table create/drop specs plus the CDC SQL bundle for one source DB.
    def _generate_table_record(
            self,
            source_type,
            db_info_name,
            case_type,
            generate_table_record
    ):
        """Resolve db-info and SQL statement attributes for table setup/teardown.

        Returns a dict with 'generateTable', 'destroyTable' and 'sql' keys;
        'sql' carries the optional CDC statements (insert2/update/update2/where)
        together with the source db_info for the downstream case builders.
        """
        spec = generate_table_record
        # Resolve the db-info object and the class holding the SQL strings.
        source_dbinfo = getattr(self.db_info_class.get(source_type), db_info_name)
        stmt_cls = self.dml_ddl_base.get(spec.get('DDL_DML_CLASS'))
        create_str = getattr(stmt_cls, spec.get('Create'))
        insert1_str = getattr(stmt_cls, spec.get('Insert'))

        # CDC statements are optional; everything defaults to None.
        insert2_str = update_str = update_str2 = where_str = None
        cdc = spec.get('cdc')
        if cdc:
            insert2_str = getattr(stmt_cls, cdc.get('Insert'))
            update_str = getattr(stmt_cls, cdc.get('Update'))
            update2_attr = cdc.get('Update2')
            if update2_attr:
                update_str2 = getattr(stmt_cls, update2_attr)
            where_str = getattr(stmt_cls, cdc.get('Where'))

        table_generate = {
            'test_table_type': case_type,
            'db_info': source_dbinfo,
            'create': create_str,
            'insert': insert1_str,
            'where': where_str,
            'amount': spec.get('Amount'),
        }
        table_name = spec.get('Table')
        if table_name:
            table_generate['table'] = f"{table_name}"
        # Fall back to a single insert when InsertNumber is absent or falsy.
        table_generate['insertNumber'] = spec.get('InsertNumber') or 1

        return {
            'generateTable': table_generate,
            'destroyTable': {
                'test_table_type': case_type,
                'db_info': source_dbinfo,
            },
            'sql': {
                'db_info': source_dbinfo,
                'insert2': insert2_str,
                'update': update_str,
                'update2': update_str2,
                'where': where_str
            }
        }

    # Expected-stats lookup.
    def _generate_stats(self, case_type):
        """Return the expected stats checks for *case_type* (None when absent)."""
        checks = self.CGB_STATS.get(case_type) or {}
        return {
            'initial': checks.get('initial'),
            'cdc': checks.get('cdc'),
            'initial_cdc': checks.get('initial+cdc'),
        }

    # Case generation for dataflow tasks.
    def _generate_case_data(
            self,
            source_type,
            case_type,
            source_db_info,
            sql,
            stats
    ):
        """Build one 'data clone' dataflow case per configured target."""
        src_name = source_db_info.get('name')
        cases = {}
        for tgt_key, tgt_name in self.target.items():
            sink_info = getattr(self.db_info_class.get(tgt_key), tgt_name)
            cases[f'{source_type}2{tgt_key}'] = {
                'parallel_use_record_name': case_type,
                'dataflow_type': 'data clone',
                'dataflow_config': {
                    'source': src_name,
                    'target': tgt_name,
                    'config': {
                        'sync_type': 'initial_sync',
                        'name': f'dataflow_{case_type}_{src_name}2{tgt_name}',
                    },
                    'relationship': {
                        'syncObjects': {"objectNames": [], "type": "table"},
                    },
                },
                'operate': {
                    'source': {
                        'source_info': source_db_info,
                        'table': '',
                        'insert': sql.get('insert2'),
                        'update': sql.get('update'),
                        'where': sql.get('where'),
                    },
                    'target': {
                        'target_info': sink_info,
                        'table': '',
                    },
                    'stats_check': {
                        'initial': stats.get('initial'),
                        'cdc': stats.get('cdc'),
                        'initial+cdc': stats.get('initial_cdc'),
                    },
                },
            }
        return {'generateCase': cases}

    # Case generation for data-copy tasks.
    def _generate_case_data_copy(self, source_type, case_type, source_db_info, sql, stats):
        """Build one 'data copy' case per configured target, keyed source2target."""
        src_name = source_db_info.get('name')
        cases = {}
        for tgt_key, tgt_name in self.target.items():
            sink_info = getattr(self.db_info_class.get(tgt_key), tgt_name)
            cases[f'{source_type}2{tgt_key}'] = {
                'parallel_use_record_name': case_type,
                'task_type': 'data copy',
                'task_config': {
                    'source': src_name,
                    'target': tgt_name,
                    'config': {
                        'sync_type': 'initial_sync',
                        'name': f'{case_type}_{src_name}2{tgt_name}',
                        'type': 'initial_sync',
                    },
                    'relationship': {
                        'syncObjects': {"objectNames": [], "type": "table"},
                    },
                },
                'operate': {
                    'source': {
                        'source_info': source_db_info,
                        'table': '',
                        'insert': sql.get('insert2'),
                        'update': sql.get('update'),
                        'update2': sql.get('update2'),
                        'where': sql.get('where'),
                    },
                    'target': {
                        'target_info': sink_info,
                        'table': '',
                    },
                    'stats_check': {
                        'initial': stats.get('initial'),
                        'cdc': stats.get('cdc'),
                        'initial+cdc': stats.get('initial_cdc'),
                    },
                },
            }
        return {'generateCase': cases}

    # Case generation for data-development tasks.
    def _generate_case_data_development(self, source_type, case_type, source_db_info, sql, stats, sync_setting):
        """Build one 'data development' case per configured target.

        sync_setting supplies the development-specific relationship options:
        'type' is spread into both source and sink, 'condition' into sink only.
        """
        src_name = source_db_info.get('name')
        sync_type = sync_setting.get('type')
        condition = sync_setting.get('condition')
        cases = {}
        for tgt_key, tgt_name in self.target.items():
            sink_info = getattr(self.db_info_class.get(tgt_key), tgt_name)
            cases[f'{source_type}2{tgt_key}'] = {
                'parallel_use_record_name': case_type,
                'task_type': 'data development',
                'task_config': {
                    'source': src_name,
                    'target': tgt_name,
                    'config': {
                        'sync_type': 'initial_sync',
                        'name': f'sync_{case_type}_{src_name}2{tgt_name}',
                        'type': 'initial_sync',
                    },
                    'relationship': {
                        'source': {**sync_type},
                        'sink': {**sync_type, **condition},
                    },
                },
                'operate': {
                    'source': {
                        'source_info': source_db_info,
                        'table': '',
                        'insert': sql.get('insert2'),
                        'update': sql.get('update'),
                        'update2': sql.get('update2'),
                        'where': sql.get('where'),
                    },
                    'target': {
                        'target_info': sink_info,
                        'table': '',
                    },
                    'stats_check': {
                        'initial': stats.get('initial'),
                        'cdc': stats.get('cdc'),
                        'initial+cdc': stats.get('initial_cdc'),
                    },
                },
            }
        return {'generateCase': cases}

    # Case generation for data-copy tasks, variant 2 (keyed by target only).
    def _generate_case_data_copy2(self, source_type, case_type, source_db_info, sql, stats):
        """Build one 'data copy' case per configured target, keyed by target key."""
        src_name = source_db_info.get('name')
        cases = {}
        for tgt_key, tgt_name in self.target.items():
            sink_info = getattr(self.db_info_class.get(tgt_key), tgt_name)
            cases[f'{tgt_key}'] = {
                'parallel_use_record_name': case_type,
                'task_type': 'data copy',
                'task_config': {
                    'source': src_name,
                    'target': tgt_name,
                    'config': {
                        'sync_type': 'initial_sync',
                        'name': f'{case_type}_{src_name}2{tgt_name}',
                        'type': 'initial_sync',
                    },
                    'relationship': {
                        'syncObjects': {"objectNames": [], "type": "table"},
                    },
                },
                'operate': {
                    'source': {
                        'source_info': source_db_info,
                        'table': '',
                        'insert': sql.get('insert2'),
                        'update': sql.get('update'),
                        'where': sql.get('where'),
                    },
                    'target': {
                        'target_info': sink_info,
                        'table': '',
                    },
                    'stats_check': {
                        'initial': stats.get('initial'),
                        'cdc': stats.get('cdc'),
                        'initial+cdc': stats.get('initial_cdc'),
                    },
                },
            }
        return {'generateCase': cases}

    # Case generation for data-development tasks, variant 2 (keyed by target only).
    def _generate_case_data_development2(self, source_type, case_type, source_db_info, sql, stats, sync_setting):
        """Build one 'data development' case per configured target, keyed by target key.

        sync_setting supplies the development-specific relationship options:
        'type' is spread into both source and sink, 'condition' into sink only.
        """
        src_name = source_db_info.get('name')
        sync_type = sync_setting.get('type')
        condition = sync_setting.get('condition')
        cases = {}
        for tgt_key, tgt_name in self.target.items():
            sink_info = getattr(self.db_info_class.get(tgt_key), tgt_name)
            cases[f'{tgt_key}'] = {
                'parallel_use_record_name': case_type,
                'task_type': 'data development',
                'task_config': {
                    'source': src_name,
                    'target': tgt_name,
                    'config': {
                        'sync_type': 'initial_sync',
                        'name': f'sync_{case_type}_{src_name}2{tgt_name}',
                        'type': 'initial_sync',
                    },
                    'relationship': {
                        'source': {**sync_type},
                        'sink': {**sync_type, **condition},
                    },
                },
                'operate': {
                    'source': {
                        'source_info': source_db_info,
                        'table': '',
                        'insert': sql.get('insert2'),
                        'update': sql.get('update'),
                        'where': sql.get('where'),
                    },
                    'target': {
                        'target_info': sink_info,
                        'table': '',
                    },
                    'stats_check': {
                        'initial': stats.get('initial'),
                        'cdc': stats.get('cdc'),
                        'initial+cdc': stats.get('initial_cdc'),
                    },
                },
            }
        return {'generateCase': cases}

    # Case generation for data copy with no source-side operations.
    def _generate_case_data_copy_dummy(self, source_type, case_type, db_info_name, stats):
        """Build 'data copy' cases per target; 'operate' carries no source section."""
        cases = {}
        for tgt_key, tgt_name in self.target.items():
            sink_info = getattr(self.db_info_class.get(tgt_key), tgt_name)
            cases[f'{source_type}2{tgt_key}'] = {
                'parallel_use_record_name': case_type,
                'task_type': 'data copy',
                'task_config': {
                    'source': db_info_name,
                    'target': tgt_name,
                    'config': {
                        'sync_type': 'initial_sync',
                        'name': f'{case_type}_{db_info_name}2{tgt_name}',
                        'type': 'initial_sync',
                    },
                    'relationship': {
                        'syncObjects': {"objectNames": [], "type": "table"},
                    },
                },
                'operate': {
                    'target': {
                        'target_info': sink_info,
                        'table': '',
                    },
                    'stats_check': {
                        'initial': stats.get('initial'),
                        'cdc': stats.get('cdc'),
                        'initial+cdc': stats.get('initial_cdc'),
                    },
                },
            }
        return {'generateCase': cases}

    # Case generation for data development with no source-side operations.
    def _generate_case_data_development_dummy(self, source_type, case_type, db_info_name, stats, sync_setting):
        """Build 'data development' cases per target; 'operate' carries no source section.

        sync_setting supplies the development-specific relationship options:
        'type' is spread into both source and sink, 'condition' into sink only.
        """
        sync_type = sync_setting.get('type')
        condition = sync_setting.get('condition')
        cases = {}
        for tgt_key, tgt_name in self.target.items():
            sink_info = getattr(self.db_info_class.get(tgt_key), tgt_name)
            cases[f'{source_type}2{tgt_key}'] = {
                'parallel_use_record_name': case_type,
                'task_type': 'data development',
                'task_config': {
                    'source': db_info_name,
                    'target': tgt_name,
                    'config': {
                        'sync_type': 'initial_sync',
                        'name': f'sync_{case_type}_{db_info_name}2{tgt_name}',
                        'type': 'initial_sync',
                    },
                    'relationship': {
                        'source': {**sync_type},
                        'sink': {**sync_type, **condition},
                    },
                },
                'operate': {
                    'target': {
                        'target_info': sink_info,
                        'table': '',
                    },
                    'stats_check': {
                        'initial': stats.get('initial'),
                        'cdc': stats.get('cdc'),
                        'initial+cdc': stats.get('initial_cdc'),
                    },
                },
            }
        return {'generateCase': cases}

    # Case generation for data copy against the Dummy target only.
    def _generate_case_data_copy_2dummy(self, source_type, case_type, source_db_info, sql, stats):
        """Build a single 'data copy' case targeting only the Dummy sink."""
        src_name = source_db_info.get('name')
        # The original iterated a one-entry dict; resolve the Dummy target directly.
        dummy_name = self.target.get('Dummy')
        sink_info = getattr(self.db_info_class.get('Dummy'), dummy_name)
        case = {
            'parallel_use_record_name': case_type,
            'task_type': 'data copy',
            'task_config': {
                'source': src_name,
                'target': dummy_name,
                'config': {
                    'sync_type': 'initial_sync',
                    'name': f'{case_type}_{src_name}2{dummy_name}',
                    'type': 'initial_sync',
                },
                'relationship': {
                    'syncObjects': {"objectNames": [], "type": "table"},
                },
            },
            'operate': {
                'source': {
                    'source_info': source_db_info,
                    'table': '',
                    'insert': sql.get('insert2'),
                    'update': sql.get('update'),
                    'where': sql.get('where'),
                },
                'target': {
                    'target_info': sink_info,
                    'table': '',
                },
                'stats_check': {
                    'initial': stats.get('initial'),
                    'cdc': stats.get('cdc'),
                    'initial+cdc': stats.get('initial_cdc'),
                },
            },
        }
        return {'generateCase': {'Dummy': case}}

    # case generate for data development
    def _generate_case_data_development_2dummy(self, source_type, case_type, source_db_info, sql, stats, sync_setting):
        """Assemble one 'data development' generated case from *source_db_info* to the Dummy sink.

        :param source_type: source db type key (kept for signature parity; unused here)
        :param case_type: case label, used for the parallel record name and task name
        :param source_db_info: source connection dict; its 'name' keys the task config
        :param sql: dict supplying 'insert2', 'update' and 'where' statements
        :param stats: dict supplying 'initial', 'cdc' and 'initial_cdc' check values
        :param sync_setting: data-development options; 'type' and 'condition' dicts
            are merged into the relationship's source/sink nodes
        :return: {'generateCase': {'Dummy': <case data dict>}}
        """
        source_name = source_db_info.get('name')

        insert_sql = sql.get('insert2')
        update_sql = sql.get('update')
        where_sql = sql.get('where')

        stats_checks = {
            'initial': stats.get('initial'),
            'cdc': stats.get('cdc'),
            'initial+cdc': stats.get('initial_cdc'),
        }

        # data-development specific options
        sync_type = sync_setting.get('type')
        condition = sync_setting.get('condition')

        cases = {}
        # Only the Dummy sink is targeted; the dict/loop keeps the shape of the
        # sibling generators that fan out over multiple targets.
        for sink_key, sink_name in {'Dummy': self.target.get('Dummy')}.items():
            sink_db_info = getattr(self.db_info_class.get(sink_key), sink_name)
            cases['Dummy'] = {
                'parallel_use_record_name': case_type,
                'task_type': 'data development',
                'task_config': {
                    'source': source_name,
                    'target': sink_name,
                    'config': {
                        'sync_type': 'initial_sync',
                        'name': f'sync_{case_type}_{source_name}2{sink_name}',
                        'type': 'initial_sync',
                    },
                    'relationship': {
                        'source': {**sync_type},
                        # sink additionally carries the filter condition
                        'sink': {**sync_type, **condition},
                    },
                },
                'operate': {
                    'source': {
                        'source_info': source_db_info,
                        'table': '',
                        'insert': insert_sql,
                        'update': update_sql,
                        'where': where_sql,
                    },
                    'target': {
                        'target_info': sink_db_info,
                        'table': '',
                    },
                    'stats_check': stats_checks,
                },
            }
        return {'generateCase': cases}

    def assemble_target(self):
        """Resolve every configured target into its db-info object.

        :return: dict mapping each target type key to the attribute named by
            its configured value on the matching ``db_info_class`` entry.
        """
        return {
            kind: getattr(self.db_info_class.get(kind), conf_name)
            for kind, conf_name in self.target.items()
        }

    def cgb_mysql(self, case='demo', _type='dataflow'):
        """Generate case data for the MySQL source config."""
        source_conf = self.CGBS_MYSQL
        return self.assemble(source_conf, case, _type)

    def cgb_oracle(self, case='demo', _type='dataflow'):
        """Generate case data for the Oracle source config."""
        source_conf = self.CGBS_ORACLE
        return self.assemble(source_conf, case, _type)

    def cgb_mongodb(self, case='demo', _type='dataflow'):
        """Generate case data for the MongoDB source config."""
        source_conf = self.CGBS_MONGODB
        return self.assemble(source_conf, case, _type)

    def cgb_postgres(self, case='demo', _type='dataflow'):
        """Generate case data for the PostgreSQL source config."""
        source_conf = self.CGBS_POSTGRES
        return self.assemble(source_conf, case, _type)

    def cgb_sqlserver(self, case='demo', _type='dataflow'):
        """Generate case data for the SQL Server source config."""
        source_conf = self.CGBS_SQLSERVER
        return self.assemble(source_conf, case, _type)

    def cgb_gbase(self, case='demo', _type='dataflow'):
        """Generate case data for the GBase source config."""
        source_conf = self.CGBS_GBASE
        return self.assemble(source_conf, case, _type)

    def cgb_mariadb(self, case='demo', _type='dataflow'):
        """Generate case data for the MariaDB source config."""
        source_conf = self.CGBS_MARIADB
        return self.assemble(source_conf, case, _type)

    def cgb_tidb(self, case='demo', _type='dataflow'):
        """Generate case data for the TiDB source config."""
        source_conf = self.CGBS_TIDB
        return self.assemble(source_conf, case, _type)

    def cgb_kundb(self, case='demo', _type='dataflow'):
        """Generate case data for the KunDB source config."""
        source_conf = self.CGBS_KUNDB
        return self.assemble(source_conf, case, _type)

    def cgb_gaussdb(self, case='demo', _type='dataflow'):
        """Generate case data for the GaussDB 200 source config."""
        source_conf = self.CGBS_GAUSSDB
        return self.assemble(source_conf, case, _type)

    def cgb_greenplum(self, case='demo', _type='dataflow'):
        """Generate case data for the Greenplum source config."""
        source_conf = self.CGBS_GREENPLUM
        return self.assemble(source_conf, case, _type)

    def cgb_kafka(self, case='demo', _type='dataflow'):
        """Generate case data for the Kafka source config."""
        source_conf = self.CGBS_KAFKA
        return self.assemble(source_conf, case, _type)

    def cgb_es(self, case='demo', _type='dataflow'):
        """Generate case data for the Elasticsearch source config."""
        source_conf = self.CGBS_ES
        return self.assemble(source_conf, case, _type)

    def cgb_db2(self, case='demo', _type='dataflow'):
        """Generate case data for the DB2 source config."""
        source_conf = self.CGBS_DB2
        return self.assemble(source_conf, case, _type)

    def _assemble_source(self):
        # NOTE(review): CGB_SOURCE is built with path_joint(), i.e. a filesystem
        # path string, so the .items() call below would raise AttributeError if
        # this method were ever invoked -- it looks dead or broken. A dict of
        # {db_type: name} (cf. assemble_target) was presumably intended; confirm
        # the right attribute before wiring this in.
        source_info = {}
        source = self.CGB_SOURCE
        for _type, name in source.items():
            # NOTE(review): name.get(_type) also disagrees with assemble_target,
            # which uses the mapped value directly -- verify the intended schema.
            source_info[_type] = getattr(self.db_info_class.get(_type), name.get(_type))
        return source_info

    @property
    def cgb_target(self):
        """Mapping of target type to its resolved db-info object (see assemble_target)."""
        resolved = self.assemble_target()
        return resolved


if __name__ == '__main__':
    from pprint import pprint

    # Ad-hoc smoke run: build the MySQL 'task data development' cases and dump them.
    generator = CGB()
    pprint(generator.cgb_mysql(_type='task data development'))
