# 用于解析MySQL表的结构批量生成DataX采集脚本、Hive建表语句等
import re
import os
# Maps a MySQL column base type to its Hive equivalent; types not listed
# here fall back to the caller-supplied default.
mysql_hive_type_mapping = {
    "varchar": "string",
    "text": "string",
    "bigint": "bigint",
    "double": "double",
}

# MySQL tables treated as fact tables (routed to the DWD layer);
# any table not in this set is treated as a dimension (DIM) table.
dwd_table_set = {
    "base_acd_file",
    "base_vio_force",
    "base_vio_surveil",
}
def _render_template(template_path, output_path, replacements):
    """Read a text template, apply literal placeholder substitutions, and
    write the result to output_path (creating parent dirs as needed)."""
    with open(template_path, mode='r', encoding='utf8') as tf:
        content = tf.read()
    for placeholder, value in replacements.items():
        content = content.replace(placeholder, value)
    # Create the output directory up front so a missing ./ddl/hive, ./datax,
    # ../dwd/ddl or ../dim/ddl does not abort the whole run.
    os.makedirs(os.path.dirname(output_path), exist_ok=True)
    with open(output_path, mode='w', encoding='utf8') as wf:
        wf.write(content)


# Iterate over every MySQL CREATE TABLE statement under ./ddl/mysql and, for
# each table, generate: a Hive ODS DDL, a Hive DWD or DIM DDL (fact vs.
# dimension decided by dwd_table_set), and a DataX mysql->hdfs job config.
for file_name in os.listdir("./ddl/mysql"):
    mysql_table_name = file_name.split(".")[0]
    hive_table_name = f"ods_{mysql_table_name}_d_f"
    with open(f"./ddl/mysql/{file_name}", mode='r', encoding='utf8') as f:
        table_create_sql = f.read()
    # Extract the column list between the outermost parentheses;
    # re.S makes '.' match across newlines.
    match = re.search(r"\((.*)\)", table_create_sql, re.S)
    if match is None:
        # File is not a CREATE TABLE statement we understand -- skip it
        # instead of dying with an AttributeError on .group(1).
        print(f"WARN: no column list found in {file_name}, skipped")
        continue
    hive_columns = []
    datax_columns = []
    # NOTE(review): splitting on ',' breaks for types such as decimal(10,2)
    # and for comments containing commas; splits[-1] assumes a single-token
    # comment at the end of each column line. TODO: confirm against the real
    # DDL dumps before relying on this for wider schemas.
    for column in match.group(1).strip().split(","):
        splits = column.strip().split(" ")
        column_name = splits[0]
        datax_column_name = column_name.replace('`', '')
        # Map the MySQL base type (text before any "(len)") to a Hive type.
        # Fallback normalized to lowercase "string" (was "String") for
        # consistency with the mapping values emitted into the DDL/JSON.
        column_type = mysql_hive_type_mapping.get(splits[1].split("(")[0].lower(), "string")
        column_comment = splits[-1]
        hive_columns.append(f'{column_name} {column_type} comment {column_comment}')
        # DataX column entry shape: {"name":"col_name","type":"hive_type"}
        datax_columns.append('{' + f'"name":"{datax_column_name}","type":"{column_type}"' + '}')
    hive_column_sql = ",\n    ".join(hive_columns)

    ########## Hive ODS create-table statement ##########
    _render_template(
        "./template/hive_ods_create_table_template.sql",
        f"./ddl/hive/{hive_table_name}.sql",
        {"{hive_table_name}": hive_table_name, "{hive_columns}": hive_column_sql},
    )

    # Fact tables (in dwd_table_set) get a DWD DDL; all others get a DIM DDL.
    if mysql_table_name in dwd_table_set:
        ########## Hive DWD create-table statement ##########
        dwd_table_name = f"dwd_{mysql_table_name}_msk_d"
        _render_template(
            "./template/hive_dwd_create_table_template.sql",
            f"../dwd/ddl/{dwd_table_name}.sql",
            {"{mysql_table_name}": mysql_table_name, "{hive_columns}": hive_column_sql},
        )
    else:
        ########## Hive DIM create-table statement ##########
        dim_table_name = f"dim_{mysql_table_name}_msk_d"
        _render_template(
            "./template/hive_dim_create_table_template.sql",
            f"../dim/ddl/{dim_table_name}.sql",
            {"{mysql_table_name}": mysql_table_name, "{hive_columns}": hive_column_sql},
        )

    ########## DataX collection job config ##########
    _render_template(
        "./template/datax_mysql_to_hdfs_template.json",
        f"./datax/{hive_table_name}.json",
        {
            "{hive_table_name}": hive_table_name,
            "{mysql_table_name}": mysql_table_name,
            "{datax_columns}": ",".join(datax_columns),
        },
    )