import re
import time
import json

from testcase import deep_update
from wt.api_service.api_imp.database_api import DataBaseAPI
from wt.init_param.task_init import TaskInit
from wt.connector import workflow_func
# from wt.old_auto_test.auto_task_define.batch_task_create.batch_datasource_schema_next import BatchDatasourceSchemaNext
from wt.auto_test_case.database_case import DatabaseCase
from wt.untils.common_until import CommonUntil
from wt.untils.decorators import Logger
from wt.api_service.api_imp.project_api import ProjectAPI
from loguru import logger

class WtTaskAPI(TaskInit):
    """Test-automation helper for creating and driving WhaleTunnel sync tasks.

    Inherits urls, template jsons and environment data from TaskInit.
    """

    def __init__(self):
        super().__init__()
        self.logger = Logger("my")
        # Unbuffered binary log handle (path comes from TaskInit.res_db_log);
        # it is shared by the helper objects below and intentionally kept open
        # for the lifetime of the process — nothing here ever closes it.
        res_db_log = open(self.res_db_log, 'wb+', buffering=0)
        self.wt_request = workflow_func.WorkFlowExec(self.login_url, self.headers)
        self.wt_project = ProjectAPI(res_db_log)
        self.common_until = CommonUntil(res_db_log)
        # self.d_schema_next = BatchDatasourceSchemaNext(res_db_log)
        self.d_api = DataBaseAPI(res_db_log)
        self.t_case = DatabaseCase()
        # Accumulates per-step responses collected by create_wt_integra_task.
        self.create_integra_res = {}

    def alert_set_or_query(self, jobid_or_jobname, method):
        """Set the alert config of a batch/stream job, or query the alert send list.

        :param jobid_or_jobname: job id for the two "*_alert_set" methods,
            search keyword for "query".
        :param method: "batch_alert_set" | "stream_alert_set" | "query".
        :return: raw response text, or a usage-hint string for an unknown method.
        """
        if method == "batch_alert_set":
            # Strip any previous id from the url, then append the current one.
            self.wt_set_batch_alert_conf_url = self.update_task_id_in_url(
                self.wt_set_batch_alert_conf_url, "batch")
            self.wt_set_batch_alert_conf_url = self.update_task_id_in_url(
                self.wt_set_batch_alert_conf_url, "batch", str(jobid_or_jobname))
            reply = self.wt_request.send_request(self.wt_set_batch_alert_conf_url, 'put',
                                                 json_data=self.wt_batch_task_alert_config_json)
            return reply.text
        if method == "stream_alert_set":
            self.wt_set_stream_alert_conf_url = self.update_task_id_in_url(
                self.wt_set_stream_alert_conf_url, "streaming")
            self.wt_set_stream_alert_conf_url = self.update_task_id_in_url(
                self.wt_set_stream_alert_conf_url, "streaming", str(jobid_or_jobname))
            reply = self.wt_request.send_request(self.wt_set_stream_alert_conf_url, 'put',
                                                 json_data=self.wt_stream_task_alert_config_json)
            return reply.text
        if method == "query":
            self.wt_alert_send_list_json["searchVal"] = jobid_or_jobname
            reply = self.wt_request.send_request(self.wt_query_alert_url, 'post',
                                                 json_data=self.wt_alert_send_list_json)
            print(reply)
            return reply.text
        return "请输入正常jobid_or_jobname 以及 method, "

    @Logger()
    def sink_schema_next(self, job_id):
        """Trigger sink schema derivation ("schema next") for the given job.

        Bug fixes vs. the previous revision:
        * the request was issued by *calling* ``self.wt_request`` directly
          instead of ``self.wt_request.send_request`` (every other method in
          this class uses ``send_request``);
        * an unused local ``WorkFlowExec`` instance was created and leaked;
        * the response object was indexed like a dict — it is parsed with
          ``.json()`` now, matching the rest of the file.

        :param job_id: job definition id appended to the schema-next url.
        """
        # The original "?refresh=""" was two adjacent literals ("?refresh=" + ""),
        # i.e. the refresh query value is intentionally empty.
        self.wt_task_sink_schema_next_url = self.wt_task_sink_schema_next_url + str(job_id) + "?refresh="
        res = self.wt_request.send_request(self.wt_task_sink_schema_next_url, 'post')
        print("返回值成功：{}".format(res.json()["success"]))

    @Logger()
    def get_connector_mapper_json(self, source_db=None, sink_db=None):
        """Return the creation-mapper json template for a source or sink connector.

        e.g. ``get_connector_mapper_json(source_db='JDBC-Mysql')`` returns the
        source template for JDBC-Mysql.  Exactly one of the two parameters is
        expected; ``source_db`` takes precedence when both are given.

        Fixes vs. the previous revision: the docstring documented a
        nonexistent ``db`` parameter, the sink list contained "LocalFile"
        twice, and membership tests ran over lists instead of sets.

        :param source_db: source connector name, e.g. "JDBC-Mysql".
        :param sink_db: sink connector name, e.g. "JDBC-Mysql".
        :return: the mapper json template; "" when the name is unknown;
            None when neither parameter is given.
        """
        # Connector families — membership decides which template bucket is used.
        jdbc_sources = {"JDBC-Mysql", "JDBC-Redshift", "JDBC-Postgres", "JDBC-TiDB", "JDBC-SQLServer",
                        "JDBC-KingBase", "JDBC-Oracle", "JDBC-Informix", "JDBC-DAMENG", "JDBC-DB2", "JDBC-HIVE",
                        "JDBC-HIVE2", "JDBC-Sqlite", "JDBC-OpenGauss", "JDBC-INCEPTOR", "JDBC-GaussDB",
                        "JDBC-Phoenix", "JDBC-Impala", "DorisV1", "DorisV2", "Kafka-Kingbase"}
        cdc_sources = {"MySQL-CDC", "Oracle-CDC", "OracleAgent-CDC", "Postgres-CDC", "SqlServer-CDC", "Dameng-CDC",
                       "Informix-CDC", "MongoDB-CDC", "Highgo-CDC", "OpenGauss-CDC", "Kafka-Kingbase-CDC",
                       "Kafka-CDC"}
        file_sources = {"S3", "LocalFile", "FtpFile", "SftpFile", "Http", "JDBC-SapHana",
                        "ElasticSearch", "MongoDB", "Kafka", "Apache_Hive", "HdfsFile", "OssFile",
                        "starrocks_as_source"}
        jdbc_sinks = {"JDBC-Mysql", "JDBC-Redshift", "JDBC-Postgres", "JDBC-TiDB", "JDBC-SQLServer",
                      "JDBC-KingBase", "JDBC-Oracle", "JDBC-Informix", "JDBC-DB2", "JDBC-HIVE",
                      "JDBC-HIVE2", "JDBC-Sqlite", "JDBC-OpenGauss", "JDBC-INCEPTOR", "JDBC-GaussDB",
                      "JDBC-Phoenix", "JDBC-Impala", "JDBC-DAMENG"}
        file_sinks = {"ElasticSearch", "MongoDB", "OssFile", "CONSOLE", "Iceberg", "DolphinDB", "DorisV1", "DorisV2",
                      "S3-Redshift", "DWS-GaussDB", "HdfsFile", "SelectDBCloud", "Apache_Hive", "S3", "LocalFile",
                      "JDBC-Hana", "FtpFile", "SftpFile", "StarRocks", "Kafka", "JDBC-SapHana", "starrocks_as_sink"}

        if source_db is not None:
            source_json = ""
            if source_db in jdbc_sources:
                source_json = self.connector_jdbc_mappers_source
            elif source_db in cdc_sources:
                source_json = self.connector_cdc_mappers[source_db + "_source"]
            elif source_db in file_sources:
                source_json = self.connector_file_mappers[source_db + "_source"]
            else:
                print("传入的source_db {}, 没有进行匹配".format(source_db))
            return source_json

        if sink_db is not None:
            sink_json = ""
            if sink_db in jdbc_sinks:
                sink_json = self.connector_jdbc_mappers_sink
            elif sink_db in file_sinks:
                sink_json = self.connector_file_mappers[sink_db + "_sink"]
                logger.info("开始获取：connector_jdbc_mappers_sink {}".format(sink_json))
            else:
                print("传入的 sink_db {}, 没有进行匹配".format(sink_db))
            return sink_json

        print("没有填写正确的source_db 或 sink_db")

    @Logger()
    def get_wt_define_task_id(self, project_name, task_name):
        """Look up a task-definition id by project name and task name.

        :param project_name: project whose code is resolved via ProjectAPI.
        :param task_name: task name used as the search keyword.
        :return: the "id" field extracted from the task-definition response.
        """
        code = self.wt_project.get_project_code(project_name)
        query = {
            "pageSize": "10",
            "pageNo": "1",
            "searchName": task_name,
            "projectCodes": code,
            "jobMode": "",
        }
        res = self.wt_request.send_request(self.wt_task_definition_url, "get", params=query)
        wt_task_define_id = self.common_until.get_res_json_value(res, "id")

        print("获取的wt_define_task_id {} , res：{}".format(wt_task_define_id, res.json()))
        return wt_task_define_id

    def import_wt_task(self, project_name, job_name):
        """Import a predefined sync-job json template into a project.

        ``job_name`` is the name of one of the ``wt_import_*_json`` template
        attributes on self; the template's job-definition name is set to
        ``job_name`` before it is posted.

        Bug fix: the original elif chain patched the name into
        ``self.wt_import_task_json`` for most branches while posting a
        *different* template, so the imported job kept the template's own
        name.  An unknown ``job_name`` also fell through to a NameError on
        ``json_data``; it now raises a ValueError up front.

        :param project_name: project to import into.
        :param job_name: template attribute name, e.g. "wt_import_task_json".
        :return: parsed json of the import response.
        :raises ValueError: when ``job_name`` is not a known template.
        """
        project_code = self.wt_project.get_project_code(project_name)
        import_url = self.wt_import_url + "?projectCode=" + str(project_code)

        known_templates = (
            "wt_import_transform_single_dml_customerSql_json",
            "wt_import_transform_single_4method_json",
            "wt_import_task_json",
            "wt_import_alert_for_fail_task_json",
            "wt_import_mysql_ddl_change_json",
            "wt_import_transform_single_javaUdf_json",
            "wt_import_transform_muti_4method_json",
            "wt_import_transform_muti_dml_customerSql_json",
            "wt_import_transform_muti_javaUdf_json",
        )
        if job_name not in known_templates:
            raise ValueError("unknown import template: {}".format(job_name))

        # The template attribute carries the same name as the job.
        json_data = getattr(self, job_name)
        json_data["syncJobDefinitionDtoList"][0]["jobDefinition"]["name"] = job_name

        res = self.wt_request.send_request(import_url, "post", json_data=json_data)
        return res.json()

    def get_wt_task_log(self, project_name, task_name, wt_type, key_log=None):
        """Fetch the full instance log of a task.

        Bug fix: the previous revision passed the ``json`` *module* as the
        request body (``json_data=json``); this GET request needs no body.

        :param project_name: project the task belongs to.
        :param task_name: task name used to locate the instance.
        :param wt_type: task type, e.g. "BATCH" / "STREAMING".
        :param key_log: currently unused; intended as a keyword to look for
            in the log (TODO: implement the lookup or drop the parameter).
        :return: the "log" field extracted from the response.
        """
        wt_task_id = self.get_wt_instance_task_info(project_name, task_name, wt_type, 'id')
        params = {
            "taskInstanceId": wt_task_id,
            "limit": 1000,
            "skipLineNum": 0,
            "allLog": "true"
        }
        res = self.wt_request.send_request(self.wt_log_detail, "get", params=params)
        ws_instance_col = self.common_until.get_res_json_value(res, "log")
        return ws_instance_col

    def get_wt_instance_task_info(self, project_name, task_name, wt_type, col):
        """Query task instances in a time window and return one column of the result.

        Fix: the request body local used to be named ``json``, shadowing the
        imported ``json`` module inside this method.

        :param project_name: project the task belongs to.
        :param task_name: instance task name to search for.
        :param wt_type: task type, applied to both syncTaskType and jobMode.
        :param col: json key to extract, e.g. "id", "state", "readRowCount".
        :return: the extracted column value.
        """
        project_code = self.wt_project.get_project_code(project_name)

        self.wt_task_instance_search["taskName"] = task_name
        self.wt_task_instance_search["projectCodes"] = project_code
        # Window boundaries from generate_time offsets 0 and 1 —
        # presumably "today" and "tomorrow"; confirm against CommonUntil.
        start_date = self.common_until.generate_time(0)
        end_date = self.common_until.generate_time(1)

        self.wt_task_instance_search["startDate"] = start_date
        self.wt_task_instance_search["endDate"] = end_date
        self.wt_task_instance_search["syncTaskType"] = wt_type
        self.wt_task_instance_search["jobMode"] = wt_type

        payload = {"statusList": []}
        res = self.wt_request.send_request(self.wt_bath_task_instance_url, "post",
                                           params=self.wt_task_instance_search, json_data=payload)
        ws_instance_col = self.common_until.get_res_json_value(res, col)
        print(ws_instance_col)
        return ws_instance_col

    def make_dag_plugin_id(self, source_mapper_json, sink_mapper_json):
        """Generate plugin ids and wire them into the DAG edge and both mappers.

        Fix: on a malformed mapper the original *returned the exception
        object*, which the caller then tried to unpack as a 2-tuple; the
        failure is now logged and re-raised so it surfaces explicitly.

        :param source_mapper_json: source task mapper template (mutated).
        :param sink_mapper_json: sink task mapper template (mutated).
        :return: (source_mapper_json, sink_mapper_json) with plugin ids set.
        """
        # Plugin ids are md5 digests derived from the current timestamp.
        source_plugin_id = self.common_until.generate_timestamp_md5()
        sink_plugin_id = self.common_until.generate_timestamp_md5()

        self.wt_task_dag_line["edges"][0]["inputPluginId"] = source_plugin_id
        self.wt_task_dag_line["edges"][0]["targetPluginId"] = sink_plugin_id

        print("sss:{}".format(source_mapper_json))

        source_mapper_json["jobDAG"]["edges"][0]["inputPluginId"] = source_plugin_id
        source_mapper_json["jobDAG"]["edges"][0]["targetPluginId"] = sink_plugin_id
        try:
            source_mapper_json["pluginConfig"]["pluginId"] = source_plugin_id
            sink_mapper_json["jobDAG"]["edges"][0]["inputPluginId"] = source_plugin_id
            sink_mapper_json["jobDAG"]["edges"][0]["targetPluginId"] = sink_plugin_id
            sink_mapper_json["pluginConfig"]["pluginId"] = sink_plugin_id
        except (KeyError, IndexError, TypeError) as e:
            print("设置plugin id失败：{}".format(e))
            raise

        return source_mapper_json, sink_mapper_json

    def replace_datasource_id(self, source_db, sink_db, source_mapper_json, sink_mapper_json):
        """Fill both mappers' dataSourceId from the created datasource records.

        Best-effort: on any lookup failure the error is printed and the
        mappers are returned as they are.
        """
        auto_test_data = self.create_db_data
        print(auto_test_data)
        print(auto_test_data[source_db])
        try:
            print(auto_test_data[source_db]["create_info"])
            create_src = auto_test_data[source_db]["create_info"]
            create_dst = auto_test_data[sink_db]["create_info"]
            src_plugin, src_name = create_src["pluginName"], create_src["datasourceName"]
            dst_plugin, dst_name = create_dst["pluginName"], create_dst["datasourceName"]
            source_mapper_json["pluginConfig"]["dataSourceId"] = self.d_api.get_datasource_info(
                src_plugin, src_name, "id")
            sink_mapper_json["pluginConfig"]["dataSourceId"] = self.d_api.get_datasource_info(
                dst_plugin, dst_name, "id")
        except Exception as e:
            print("替换datasource 失败，{}".format(e))
        return source_mapper_json, sink_mapper_json

    def replace_source_table_detail(self, source_mapper_json, database=None, table_with_db_list=None, tableFields=None):
        """Rewrite the source mapper's table selection.

        A list in ``table_with_db_list`` switches the mapper to
        MULTIPLE_TABLE mode; anything else is treated as a single table.
        Entries may carry their database as "db@table".

        :param source_mapper_json: source mapper template (mutated and returned).
        :param database: default database, used when an entry has no "db@" prefix.
        :param table_with_db_list: list of "db@table"/"table" strings, or one
            such string for single-table mode.
        :param tableFields: field list stored per table entry.
            NOTE(review): it is written to mapper["tableFields"], not into
            mapper["selectedFields"]["tableFields"] — confirm intended schema.
        :return: the mutated source_mapper_json.
        """
        try:
            table_list = []

            # Multi-table mode.
            if isinstance(table_with_db_list, list):
                print(table_with_db_list)
                source_mapper_json["pluginConfig"]["tableSelectFields"] = []
                source_mapper_json["pluginConfig"]["tableOption"]["tables"] = []
                source_mapper_json["pluginConfig"]["tableOption"]["databases"] = []
                source_mapper_json["pluginConfig"]["incrementOption"]["updateTableName"] = []

                for tmp in table_with_db_list:
                    try:
                        # Handles: "source_table_is_customer": "qa_source@t1,qa_sink@t2".
                        table_with_db = tmp.split("@")
                        if len(table_with_db) > 1:
                            database = table_with_db[0]
                            table = table_with_db[1]
                        # Handles: "source_table_is_customer": "t1,t2".
                        else:
                            table = tmp
                    except Exception as e:
                        print("通过'@'切割source_table_is_customer 的db库失败 {}".format(e))
                    # NOTE(review): if the split above raised, `table` (and possibly
                    # `database`) keeps its value from the previous iteration — or is
                    # unbound on the first one.
                    table_list.append(table)
                    mapper = {'database': "", 'tableName': "", 'selectedFields': { 'all': True, 'tableFields': ""}}
                    mapper["database"] = database
                    mapper["tableName"] = table
                    mapper["tableFields"] = tableFields
                    source_mapper_json["pluginConfig"]["tableSelectFields"].append(mapper)
                    source_mapper_json["pluginConfig"]["tableOption"]["tables"].append(table)
                # Only the database of the *last* processed entry is recorded here.
                source_mapper_json["pluginConfig"]["tableOption"]["databases"].append(database)
                source_mapper_json["tableNames"] = [{"database": database, "tables": table_list}]
                source_mapper_json["pluginConfig"]["sceneMode"] = "MULTIPLE_TABLE"
            else:
                # Single-table mode; handles "source_table_is_customer": "qa_source@t1".
                table_with_db = table_with_db_list.split("@")
                if len(table_with_db) > 1:
                    database = table_with_db[0]
                    table = table_with_db[1]
                else:
                    # Single-table mode; handles "source_table_is_customer": "t1".
                    table = table_with_db_list
                table_list.append(table)
                source_mapper_json["pluginConfig"]["tableOption"]["databases"] = [database]
                source_mapper_json["pluginConfig"]["tableOption"]["tables"] = table_list

                # Restore single-table parameters: reset sceneMode and drop tableNames.
                source_mapper_json["pluginConfig"]["sceneMode"] = "SINGLE_TABLE"
                if "tableNames" in source_mapper_json:
                    del source_mapper_json["tableNames"]
        except Exception as e:
            print("table_split, Error promote {}".format(e))
        print("table_split, source_mapper_json is {}".format(source_mapper_json))
        return source_mapper_json

    def replace_source_table(self, source_db, source_mapper_json, source_table_is_customer=None, job_id=None, job_mode=None, job_type=None):
        """Patch source table info into the mapper and run schema derivation.

        Fix: the previous revision unconditionally called ``.split`` on
        ``source_table_is_customer`` and relied on the resulting
        AttributeError for the default ``None`` case, printing a spurious
        error on every normal run.

        :param source_db: source connector key into self.create_db_data.
        :param source_mapper_json: source mapper template (mutated).
        :param source_table_is_customer: optional override — "t1", "t1,t2"
            or "db@t1,db@t2".
        :param job_id: job definition id, forwarded to schema derivation.
        :param job_mode: unused here; kept for caller compatibility.
        :param job_type: forwarded to schema derivation.
        :return: the patched source_mapper_json.
        """
        table_fields = self.create_db_data[source_db]["table_fields"]
        tables = self.create_db_data[source_db]["filter_table_name"]
        database = self.create_db_data[source_db]["db_info"]

        if source_table_is_customer is not None:
            parts = source_table_is_customer.split(",")
            # A comma list becomes a list (multi-table mode); a single name
            # stays a plain string (single-table) — replace_source_table_detail
            # keys its mode off this type.
            tables = parts if len(parts) > 1 else source_table_is_customer
        source_mapper_json = self.replace_source_table_detail(source_mapper_json, database, tables, table_fields)

        # Schema derivation ("model push") against what ended up in the mapper.
        database = source_mapper_json["pluginConfig"]["tableOption"]["databases"][0]
        tables = source_mapper_json["pluginConfig"]["tableOption"]["tables"]
        table_select_fields = self.source_next_schame(source_mapper_json, database, tables, job_id, job_type)

        self.create_integra_res["source_push_res"] = table_select_fields

        return source_mapper_json

    def replace_sink_table_detail(self, sink_mapper_json, database, table):
        """Point the sink mapper's tableOption at a single database/table pair."""
        option = sink_mapper_json["pluginConfig"]["tableOption"]
        option["databases"] = [database]
        option["tables"] = [table]
        return sink_mapper_json

    def replace_sink_table(self, sink_db, sink_mapper_json, sink_table_is_customer):
        """Fill in the sink-side database/table, honouring per-connector layout.

        ``sink_table_is_customer`` overrides the table name; otherwise the
        "${table_name}" placeholder is used.  Connectors not listed in the
        layout table are left untouched.
        """
        sink_table = sink_table_is_customer if sink_table_is_customer is not None else "${table_name}"

        # connector -> (database, table-name pattern)
        layouts = {
            "JDBC-Mysql": ("qa_sink", "{}"),
            "JDBC-Postgres": ("qa_sink", "public.{}"),
            "JDBC-Oracle": ("ORCL", "QA_SINK.{}"),
            "JDBC-SQLServer": ("qa_sink", "dbo.{}"),
            "JDBC-DB2": ("CICDDB2", "QA_SINK.{}"),
            "JDBC-KingBase": ("qa_sink", "PUBLIC.{}"),
            "JDBC-DAMENG": ("DAMENG", "QA_SINK.{}"),
        }
        for file_like in ("S3", "FtpFile", "SftpFile", "LocalFile", "Kafka"):
            layouts[file_like] = ("default", "{}")

        if sink_db in layouts:
            database, pattern = layouts[sink_db]
            self.replace_sink_table_detail(sink_mapper_json, database, pattern.format(sink_table))
        print("aaaa: 123 {}".format(sink_mapper_json))
        return sink_mapper_json

    def update_task_id_in_url(self, url, key, task_id=''):
        """Clear or append the id segment that follows ``key`` in ``url``.

        update_task_id_in_url("http://st1:12345/dolphinscheduler/whaletunnel/api/v1/job/task/13510099281056","task","")
        strips everything after "/task"; a non-empty ``task_id`` is appended
        instead.

        :param url: url to rewrite.
        :param key: path segment the id follows (e.g. "task", "config").
        :param task_id: id to append; '' / None / "None" means "clear".
        :return: the rewritten url.
        """
        if task_id not in ['', None, "None"]:
            return url + "/" + task_id
        # Drop the trailing numeric id segment after /<key>.
        return re.sub(r'/{}/.*\d+'.format(key), '/{}'.format(key), url)

    def query_alert_detail(self, job_name):
        """Query alert detail for the instance found for ``job_name``.

        NOTE(review): ``search_stream_task`` takes ``project_name`` as its
        first parameter, but ``job_name`` is passed here — confirm intended.
        Also, ``instance_id`` is unbound (NameError) when the search returns
        nothing, and only the *last* returned instance wins.

        :param job_name: value forwarded to search_stream_task.
        :return: parsed json of the alert-detail response.
        """
        res = self.search_stream_task(job_name)
        for instance_name in res.keys():
            # Instance names appear to be "<prefix>-<processInstanceId>" — TODO confirm.
            instance_id = instance_name.split("-")[1]
        self.wt_alert_detail_request_json["processInstanceId"]=instance_id
        tmp = self.wt_request.send_request(self.wt_query_alert_detail_url, "post", json_data=self.wt_alert_detail_request_json)
        res = tmp.json()
        print(res)
        return res

    def get_whaletunnel_api_v1_all_diff_info(self, job_id, base_url="http://up:12345"):
        """Fetch a job definition with allDiff=true and return its first plugin.

        Improvements: the host is no longer hard-coded inside the body (it is
        an overridable default now), and the inspected plugin entry is
        returned instead of being print-only.

        :param job_id: job id interpolated into the whaletunnel api path.
        :param base_url: scheme://host:port of the dolphinscheduler server.
        :return: the first entry of ``data.plugins`` from the response.
        """
        url = "{}/dolphinscheduler/whaletunnel/api/v1/job/{}?allDiff=true".format(base_url, job_id)
        parameters = {"allDiff": "true"}
        tmp = self.wt_request.send_request(url, method='GET', params=parameters)
        res = tmp.json()
        catalogTables = res["data"]["plugins"][0]
        print(res)
        print(catalogTables)
        return catalogTables


    def source_next_schame(self, source_mapper_json, database, tables, job_id, job_type="DATA_INTEGRATION"):
        """Run schema derivation ("schema next") for the source plugin config.

        Multi-table mode posts pluginConfig + tableNames to the plural
        schemas-next endpoint; single-table/increment mode posts pluginConfig
        alone to the singular endpoint.

        Fix: the failure log used ``"...".format(e)`` with no placeholder,
        silently discarding the exception text.

        :param source_mapper_json: source mapper whose pluginConfig is posted.
        :param database: database name used for derivation.
        :param tables: table name list (single-table mode uses tables[0]).
        :param job_id: job version id sent as jobVersionId.
        :param job_type: e.g. "DATA_INTEGRATION" / "INCREMENT_SYNC".
        :return: list of selected-field dicts, or False when derivation fails.
        """
        sceneMode = source_mapper_json["pluginConfig"]["sceneMode"]

        # Multi-table mode.
        if sceneMode == "MULTIPLE_TABLE":
            source_mapper_json["pluginConfig"]["tableSelectFields"] = []
            # NOTE(review): hard-coded increment table — presumably fixture data; confirm.
            source_mapper_json["pluginConfig"]["incrementOption"]['updateTableName'] = "qa_source.aaa1,aaa1"
            parameters = {"datasourceId": source_mapper_json["pluginConfig"]["dataSourceId"], "jobDefineType": job_type, "jobVersionId": job_id}
            payload = {"pluginConfig": source_mapper_json["pluginConfig"], "tableNames": [{"database": database, "tables": tables}]}
            url = self.wt_schemas_next_url
        # Single-table / increment mode.
        else:
            parameters = {"datasourceId": source_mapper_json["pluginConfig"]["dataSourceId"],
                          "databaseName": database,
                          "tableName": tables[0],
                          "jobDefineType": job_type,
                          "jobVersionId": job_id
                          }
            payload = source_mapper_json["pluginConfig"]
            url = self.wt_schema_next_url
        try:
            schemaNext_res = self.wt_request.send_request(url, method='POST', params=parameters, json_data=payload)
            data = schemaNext_res.json()
            print(data)
        except Exception as e:
            print("模型推演失败: {}".format(e))
            return False

        fields = data['data'][0]['tableInfos'][0]['fields']
        table_fields = [field['name'] for field in fields]
        select_fields = {"database": data['data'][0]['database'],
                         "tableName": data['data'][0]['tableInfos'][0]['tableName'],
                         "selectedFields": {"all": False, "tableFields": table_fields}}

        # Build the output: multi-table mode expects one entry per table.
        if sceneMode == "MULTIPLE_TABLE":
            table_select_fields = [select_fields, select_fields]
        else:
            table_select_fields = [select_fields]

        return table_select_fields

    def search_stream_task(self, project_name, job_name=None):
        """Search task definitions in a project and return {name: id}.

        Note: self.wt_search_url is rewritten in place (2.7 fallback, task
        filter, projectCodes substitution), just like the original did.
        """
        project_code = self.wt_project.get_project_code(project_name)
        if self.sys_version == "2.7":
            self.wt_search_url = self.wt_search_27_url
        if job_name:
            # Strip any previous task id, then append the requested one.
            self.wt_search_url = self.update_task_id_in_url(self.wt_search_url, "task")
            self.wt_search_url = self.update_task_id_in_url(self.wt_search_url, "task", str(job_name))
        self.wt_search_url = re.sub(r"projectCodes=[^&]+", "projectCodes="+str(project_code), self.wt_search_url)
        print(self.wt_search_url)
        reply = self.wt_request.send_request(self.wt_search_url, "POST", json_data=self.cdc_task_search_data)
        payload = reply.json()["data"]
        return {entry["name"]: entry["id"] for entry in payload["totalList"]}

    def stop_stream_task(self, project_name, num=1):
        """Stop all running stream tasks of a project, then sleep ``num`` seconds."""
        running = self.search_stream_task(project_name)
        ids = list(running.values())
        if not ids:
            print("无正在运行的流任务，无需停止")
        else:
            self.cdc_task_stop_data["ids"] = ids
            res = self.wt_request.send_request(self.wt_stop_url, "post", params=self.cdc_task_stop_data)
            print("停止流任务 list是: {}, 请求结果：{}".format(running.keys(), res.text))
        time.sleep(num)

    def deep_update(self, data, updates):
        """Recursively merge ``updates`` into ``data``.

        Nested dicts are merged recursively; any other value (including a
        dict replacing a non-dict) overwrites the existing entry.

        Fix: the recursion used to route through the module-level
        ``deep_update`` imported from ``testcase`` instead of this method
        (assumed to implement the same merge — now self-contained), and used
        a redundant ``data.get(key, {})`` although ``key in data`` had
        already been checked.

        :param data: original dict (mutated in place).
        :param updates: dict of updates to merge in.
        :return: the updated ``data``.
        """
        for key, value in updates.items():
            if isinstance(value, dict) and isinstance(data.get(key), dict):
                # Both sides are dicts: merge recursively.
                data[key] = self.deep_update(data[key], value)
            else:
                # Otherwise replace the value outright.
                data[key] = value
        return data

    def create_wt_integra_task(self, project_name, task_name, job_mode, source_db, sink_db,
                               source_table_is_customer=None,
                               sink_table_is_customer=None,
                               source_config=None, sink_config=None, job_type="DATA_INTEGRATION",
                               choice_db_version=None):
        """Create a complete integration task: job, source, sink, config, DAG line.

        Batch:  m.create_wt_integra_task("v26", "v26_auto_mysql_to_mysql", "BATCH", "JDBC-Mysql", "JDBC-Mysql")
        Stream: m.create_wt_integra_task("v26", "v26_auto_mysql_to_mysql", "STREAMING", "JDBC-Mysql", "JDBC-Mysql")

        Bug fixes vs. the previous revision: the sink/config/line success
        checks all inspected ``source_res`` instead of their own responses,
        and a failure while preparing the mappers was printed and then hit a
        NameError further down — it is now re-raised.

        :param project_name: e.g. "v26".
        :param task_name: e.g. "v26_auto_mysql_to_mysql".
        :param job_mode: "BATCH" or "STREAMING".
        :param job_type: "DATA_INTEGRATION" or "INCREMENT_SYNC" or "DATA_REPLICA".
        :param source_db: source connector, e.g. "JDBC-Mysql".
        :param sink_db: sink connector, e.g. "JDBC-Mysql".
        :param source_table_is_customer: optional custom source table name(s).
        :param sink_table_is_customer: optional custom sink table name.
        :param source_config: optional overrides deep-merged into the source mapper.
        :param sink_config: optional overrides deep-merged into the sink mapper.
        :param choice_db_version: custom db version (currently unused here).
        :return: dict of the responses collected while creating the task.
        """
        print(source_config)
        print(sink_config)
        print(job_type)
        self.wt_integration_job_define["jobMode"] = job_mode
        self.wt_integration_job_define["jobType"] = job_type
        self.wt_integration_job_define["name"] = task_name

        project_code = self.wt_project.get_project_code(project_name)
        self.wt_integration_job_define["projectCode"] = project_code

        job_json = self.wt_integration_job_define

        # Create the job shell and remember its id.
        res = self.wt_request.send_request(self.wt_job_define_url, method='POST', json_data=job_json)
        job_id = self.common_until.get_res_json_value(res, "data")

        # Clear the url first, then append the fresh job id.
        self.wt_task_define_url = self.update_task_id_in_url(self.wt_task_define_url, "task")
        self.wt_task_define_url = self.update_task_id_in_url(self.wt_task_define_url, "task", str(job_id))

        self.create_integra_res["job_create_res"] = res.json()

        # Build the source/sink mapper jsons from the connector templates.
        try:
            source_mapper_json = self.get_connector_mapper_json(source_db=source_db)
            sink_mapper_json = self.get_connector_mapper_json(sink_db=sink_db)

            # Apply caller-supplied config overrides.
            if source_config is not None:
                source_mapper_json = self.deep_update(source_mapper_json, source_config)
            if sink_config is not None:
                sink_mapper_json = self.deep_update(sink_mapper_json, sink_config)

            print("source_mapper_json是xx: {}".format(source_mapper_json))
            print("sink_mapper_json是xx: {}".format(sink_mapper_json))

            # Generate plugin ids.
            source_mapper_json, sink_mapper_json = self.make_dag_plugin_id(source_mapper_json, sink_mapper_json)
            # Replace the datasource ids.
            source_mapper_json, sink_mapper_json = self.replace_datasource_id(source_db, sink_db, source_mapper_json,
                                                                              sink_mapper_json)

            # Replace source tables and run schema derivation.
            source_mapper_json = self.replace_source_table(source_db, source_mapper_json, source_table_is_customer,
                                                           job_id, job_mode, job_type)

            # Replace sink table info.
            sink_mapper_json = self.replace_sink_table(sink_db, sink_mapper_json, sink_table_is_customer)

            logger.info("开始替换sink 表数据：connector_jdbc_mappers_sink {}".format(sink_mapper_json))
        except Exception as e:
            print(e)
            raise  # without valid mappers the requests below cannot succeed

        # Create the source task.
        source_res = self.wt_request.send_request(self.wt_task_define_url, method='POST', json_data=source_mapper_json)
        self.common_until.get_res_is_success(source_res, "创建 {} 同步任务source".format(task_name))

        # Create the sink task.
        sink_res = self.wt_request.send_request(self.wt_task_define_url, method='POST', json_data=sink_mapper_json,
                                                params={"refresh": True})
        self.common_until.get_res_is_success(sink_res, "创建 {} sink".format(task_name))

        # Create the job config.
        self.wt_job_config_url = self.update_task_id_in_url(self.wt_job_config_url, "config")
        self.wt_job_config_url = self.update_task_id_in_url(self.wt_job_config_url, "config", str(job_id))

        self.wt_task_config_json["name"] = task_name
        conf_res = self.wt_request.send_request(self.wt_job_config_url, method='PUT',
                                                json_data=self.wt_task_config_json)
        self.common_until.get_res_is_success(conf_res, "创建 {} config".format(task_name))

        # Connect source and sink in the DAG.
        self.wt_job_dag_url = self.update_task_id_in_url(self.wt_job_dag_url, "dag")
        self.wt_job_dag_url = self.update_task_id_in_url(self.wt_job_dag_url, "dag", str(job_id))
        line_res = self.wt_request.send_request(self.wt_job_dag_url, method='POST', json_data=self.wt_task_dag_line)
        self.common_until.get_res_is_success(line_res, "创建 {} 连线".format(task_name))

        try:
            self.create_integra_res["line_create_res"] = line_res.json()
            self.create_integra_res["config_create_res"] = conf_res.json()
            self.create_integra_res["sink_create_res"] = sink_res.json()
            self.create_integra_res["source_create_res"] = source_res.json()
            self.create_integra_res["job_id"] = job_id
        except Exception as e:
            print(e)

        return self.create_integra_res


if __name__ == '__main__':
    # Ad-hoc driver: stop any running stream tasks in project "lzw".
    m = WtTaskAPI()
    m.stop_stream_task("lzw")
    # m.get_wt_define_task_id("auto_test_project","100281_v26_fullread_batch_mysql_to_mysql")
    # Batch task examples
    # m.create_wt_integra_task("v26", "v26_auto_batch_mysql_to_mysql2", "BATCH", "JDBC-Mysql", "JDBC-Mysql")
    # m.create_ws_task("v26", "v26_auto_batch_mysql_to_mysql", "BATCH")
    # m.run_ws_task("v26", "v26_auto_batch_mysql_to_mysql", "BATCH")
    # m.block_until_task_complete("v26", "auto_mysql_to_mysql", "BATCH")
    # m.get_wt_instance_task_info("v26", "v26_auto_batch_mysql_to_mysql", "BATCH", "state")
    # m.get_wt_instance_task_info("v26", "v26_auto_batch_mysql_to_mysql", "BATCH", "writeRowCount")
    # m.get_wt_instance_task_info("v26", "v26_auto_batch_mysql_to_mysql", "BATCH", "readRowCount")

    # Streaming task examples
    # m.create_wt_integra_task("v26", "v26_auto_cdc_mysql_to_mysql2", "STREAMING", "MySQL-CDC", "JDBC-Mysql")
    # m.create_ws_task("v26", "v26_auto_cdc_mysql_to_mysql", "STREAMING")
    # m.run_ws_task("v26", "v26_auto_cdc_mysql_to_mysql", "STREAMING")
    # m.block_until_task_complete("v26", "v26_auto_cdc_mysql_to_mysql", "STREAMING")
    # m.get_wt_instance_task_info("v26", "v26_auto_cdc_mysql_to_mysql", "STREAMING", "state")
    # m.get_wt_instance_task_info("v26", "v26_auto_cdc_mysql_to_mysql", "STREAMING", "writeRowCount")
    # m.get_wt_instance_task_info("v26", "v26_auto_cdc_mysql_to_mysql", "STREAMING", "readRowCount")
