import os
import re

# MySQL column type -> Hive column type used when generating the ODS DDL.
# Types absent from this map fall back to the sentinel 'stop' downstream.
mysql_hive_type_map = {
    'varchar': 'String',
    'text': 'String',
    'double': 'Double',
}

# Table -> name of its update-time column; only tables listed here get
# incremental artefacts generated.
mysql_table_update_time_column_map = dict(
    base_acd_file="gxsj",
    base_bd_drivinglicense="gxsj",
    base_bd_vehicle="gxrq",
    base_vio_force="gxsj",
    base_vio_surveil="gxsj",
)

# Table -> field separator used in the generated Hive DDL / DataX configs
# (callers default to "," for tables missing from this map).
table_sep_map = dict(
    base_acd_file="|",
    base_acd_filehuman=",",
    base_bd_drivinglicense=",",
    base_bd_vehicle="|",
    base_cards_pos=",",
    base_vio_force=",",
    base_vio_surveil=",",
)

# Table -> primary-key column, used by the full/incremental merge SQL.
table_pk_map = dict(
    base_acd_file="sgbh",
    base_bd_drivinglicense="dabh",
    base_bd_vehicle="xh",
    base_vio_force="xh",
    base_vio_surveil="xh",
)

dim_table_set = {"base_acd_filehuman","base_cards_pos","base_bd_drivinglicense","base_bd_vehicle"}

# 遍历MySQL的建表语句
for file in os.listdir("./mysql_ddl"):
    # 加载每一张表的建表语句，并通过正则提取表的列信息
    mysql_table_name = file.split(".")[0]
    with open(f"./mysql_ddl/{file}", mode='r', encoding='utf8') as f:
        sql_str = "".join(f.readlines())
        # re.S 表示可以跨行匹配
        columns_str = re.search('\((.*)\) ENGINE', sql_str, re.S).group(1)
        # 定义Hive建表语句中每一列的模板
        hive_column_format = '`{column_name}` {column_type} COMMENT {column_comment}'
        hive_column_list = []
        column_list = []
        column_source_list = []
        datax_hdfs_column_format = '"name": "{column_name}","type": "{column_hive_type}"'
        datax_hdfs_column_list = []
        for column_str in columns_str.strip().split(",\n"):
            splits = column_str.strip().split(" ")
            column_name = splits[0].replace("`", "")
            column_source_list.append(splits[0])
            column_list.append(f'"{column_name}"')
            column_mysql_type = splits[1].split("(")[0]
            # 通过类型映射map转换成hive的类型
            column_hive_type = mysql_hive_type_map.get(column_mysql_type, 'stop')
            datax_hdfs_column_list.append(
                "{" + datax_hdfs_column_format.format(column_name=column_name, column_hive_type=column_hive_type) + "}")
            # if column_hive_type == 'stop':
            #     print(column_mysql_type)
            #     exit(-100)
            comment = splits[-1]
            hive_column = hive_column_format.format(column_name=column_name, column_type=column_hive_type,
                                                    column_comment=comment)
            hive_column_list.append(hive_column)
        columns = ",\n".join(hive_column_list)
        # 加载ods的建表语句模板
        hive_table_name = f"ods_{mysql_table_name}_d_f"
        with open("./template/ods_create_table.sql", mode='r', encoding='utf8') as f2:
            hive_create_table = "".join(f2.readlines()).replace("{hive_table_name}", hive_table_name).replace(
                "{columns}", columns).replace("{table_sep}", table_sep_map.get(mysql_table_name, ","))
            # 将全量表的建表语句保存
            with open(f"./ddl/{hive_table_name}.sql", mode='w', encoding='utf8') as f3:
                f3.write(hive_create_table)
        # 是否为维表
        flag = False
        if flag:
            if mysql_table_name not in dim_table_set:
                # 加载dwd的建表语句模板
                dwd_hive_table_name = f"dwd_{mysql_table_name}_msk_d"
                with open("./template/dwd_create_table.sql", mode='r', encoding='utf8') as f2:
                    dwd_hive_create_table = "".join(f2.readlines()).replace("{dwd_hive_table_name}", dwd_hive_table_name).replace(
                        "{columns}", columns).replace("{table_sep}", table_sep_map.get(mysql_table_name, ","))
                    # 将全量表的建表语句保存
                    with open(f"../dwd/ddl/{dwd_hive_table_name}.sql", mode='w', encoding='utf8') as f3:
                        f3.write(dwd_hive_create_table)
                # 加载dwd的sql语句模板
                with open("./template/dwd_sql_template.sql", mode='r', encoding='utf8') as f2:
                    dwd_insert_table = "".join(f2.readlines()).replace("{dwd_hive_table_name}", dwd_hive_table_name).replace(
                        "{ods_hive_table_name}", hive_table_name).replace(
                        "{columns}", "\n,".join(column_source_list))
                    # 将全量表的建表语句保存
                    with open(f"../dwd/dql/insert_{dwd_hive_table_name}.sql", mode='w', encoding='utf8') as f3:
                        f3.write(dwd_insert_table)
            else:
                # 加载dim的建表语句模板
                dim_hive_table_name = f"dim_{mysql_table_name}_msk_d"
                with open("./template/dim_create_table.sql", mode='r', encoding='utf8') as f2:
                    dim_hive_create_table = "".join(f2.readlines()).replace("{dim_hive_table_name}", dim_hive_table_name).replace(
                        "{columns}", columns).replace("{table_sep}", table_sep_map.get(mysql_table_name, ","))
                    # 将全量表的建表语句保存
                    with open(f"../dim/ddl/{dim_hive_table_name}.sql", mode='w', encoding='utf8') as f3:
                        f3.write(dim_hive_create_table)
                # 加载dim的sql语句模板
                with open("./template/dim_sql_template.sql", mode='r', encoding='utf8') as f2:
                    dim_insert_table = "".join(f2.readlines()).replace("{dim_hive_table_name}", dim_hive_table_name).replace(
                        "{ods_hive_table_name}", hive_table_name).replace(
                        "{columns}", "\n,".join(column_source_list))
                    # 将全量表的建表语句保存
                    with open(f"../dim/dql/insert_{dim_hive_table_name}.sql", mode='w', encoding='utf8') as f3:
                        f3.write(dim_insert_table)
        # 如果表存在更新时间字段才生成增量建表语句及增量采集脚本
        # 加载ods的建表语句模板
        if mysql_table_name in mysql_table_update_time_column_map:
            hive_incr_table_name = f"ods_{mysql_table_name}_d_i"
            with open("./template/ods_create_table.sql", mode='r', encoding='utf8') as f4:
                hive_create_table = "".join(f4.readlines()).replace("{hive_table_name}", hive_incr_table_name).replace(
                    "{columns}", columns).replace("{table_sep}", table_sep_map.get(mysql_table_name, ","))
                # 将增量表的建表语句保存
                with open(f"./ddl/incr/{hive_incr_table_name}.sql", mode='w', encoding='utf8') as f5:
                    f5.write(hive_create_table)
            with open("./template/datax_incr_template.json", mode='r', encoding='utf8') as f6:
                datax_json = "".join(f6.readlines()) \
                    .replace("{mysql_table_name}", mysql_table_name) \
                    .replace("{hive_incr_table_name}", hive_incr_table_name) \
                    .replace("{datax_mysql_columns}", ",".join(column_list)) \
                    .replace("{datax_hdfs_columns}", ",".join(datax_hdfs_column_list)) \
                    .replace("{update_time_column}", mysql_table_update_time_column_map.get(mysql_table_name)) \
                    .replace("{table_sep}", table_sep_map.get(mysql_table_name, ","))
            with open(f"./datax/incr/{hive_incr_table_name}.json", mode='w', encoding='utf8') as f7:
                f7.write(datax_json)
            # 加载merge合并增全量sql模板
            with open("./template/merge_sql_template.sql", mode='r', encoding='utf8') as f10:
                merge_sql = "".join(f10.readlines()).replace("{full_table_name}",hive_table_name) \
                                                     .replace("{incr_table_name}",hive_incr_table_name) \
                                                     .replace("{columns}",",".join(column_source_list)) \
                                                     .replace("{table_pk}",table_pk_map[mysql_table_name]) \
                                                     .replace("{update_time_column}",mysql_table_update_time_column_map.get(mysql_table_name))
                # 将增全量合并的SQL保存
                with open(f"./merge/merge_{hive_incr_table_name}.sql", mode='w', encoding='utf8') as f11:
                    f11.write(merge_sql)
        # 加载datax采集脚本生成模板
        with open("./template/datax_template.json", mode='r', encoding='utf8') as f8:
            datax_json = "".join(f8.readlines()) \
                .replace("{mysql_table_name}", mysql_table_name) \
                .replace("{hive_table_name}", hive_table_name) \
                .replace("{datax_mysql_columns}", ",".join(column_list)) \
                .replace("{datax_hdfs_columns}", ",".join(datax_hdfs_column_list)) \
                .replace("{table_sep}", table_sep_map.get(mysql_table_name, ","))
            with open(f"./datax/{hive_table_name}.json", mode='w', encoding='utf8') as f9:
                f9.write(datax_json)
