package com.qm.datax;

import cn.hutool.json.JSONUtil;
import com.qm.datax.beans.Table;
import com.qm.datax.configuration.Configuration;
import com.qm.datax.helper.CKHelper;
import com.qm.datax.helper.DataxJsonHelper;
import com.qm.datax.helper.MysqlHelper;

import java.io.FileWriter;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.List;

import static cn.hutool.core.lang.Console.log;

/**
 * @Author：wangshali
 * @CreateTime：2024-10-31
 * @Description：Generates DataX job JSON configuration files (daas / stg layers) driven by {@link com.qm.datax.configuration.Configuration}.
 * @Source: HDFS (h2c) / MySQL (m2h)
 * @Sink: ClickHouse (h2c) / HDFS (m2h)
 * @Version：v1.0
 */

public class Main {

    /**
     * Entry point: generates the DataX job JSON file(s) according to
     * {@link Configuration}.
     *
     * @param args unused; all settings are read from {@link Configuration}
     * @throws IOException if an output directory or JSON file cannot be written
     */
    public static void main(String[] args) throws IOException {
        genJson();
    }

    /**
     * Automatically generates the DataX job JSON file for the configured
     * migration type:
     * <ul>
     *   <li>{@code h2c}: HDFS -&gt; ClickHouse (daas layer), written to
     *       {@link Configuration#EXPORT_OUT_DIR}</li>
     *   <li>{@code m2h}: MySQL -&gt; HDFS (stg layer), written to
     *       {@link Configuration#IMPORT_OUT_DIR}</li>
     * </ul>
     * Does nothing when {@code EXPORT_OUT_DIR} is unset. Only the first table
     * returned by the helper is processed (matches the original behavior).
     *
     * @throws IOException if a directory cannot be created or a file cannot be written
     */
    private static void genJson() throws IOException {
        // Guard clause: nothing to do without a configured output directory.
        if (Configuration.EXPORT_OUT_DIR == null || Configuration.EXPORT_OUT_DIR.isEmpty()) {
            return;
        }

        // Migration type (h2c / m2h).
        String migrationType = Configuration.MIGRATION_TYPE;
        // Synchronization mode.
        String synchronizeType = Configuration.SYNCHRONIZE_TYPE;

        String mydb = Configuration.ARGS;

        // Ensure the export directory exists before writing.
        Files.createDirectories(Paths.get(Configuration.EXPORT_OUT_DIR));

        if ("h2c".equals(migrationType)) {  // hdfs -> ck  (daas layer)

            log("[hdfsReader] 开始生成daas层配置文件...");

            CKHelper ckHelper = new CKHelper(
                    Configuration.CK_URL,
                    Configuration.CK_DATABASE,
                    Configuration.CK_TABLES);

            // Was System.out.println — use the file's log(...) convention.
            log(Configuration.CK_TABLES);

            DataxJsonHelper dataxJsonHelper = new DataxJsonHelper();

            // Fetch the tables (comma-separated list in configuration).
            List<Table> tables = ckHelper.getTables();
            if (tables.isEmpty()) {
                // Avoid IndexOutOfBoundsException on tables.get(0) below.
                log("[hdfsReader] 未找到任何表，跳过生成");
                return;
            }

            // Fill in the JSON template.
            dataxJsonHelper.setColumns(tables.get(0), migrationType, synchronizeType);
            dataxJsonHelper.setTable(tables.get(0), synchronizeType);

            // Write the final JSON. try-with-resources guarantees the writer is
            // closed even if serialization throws (the original leaked it).
            try (FileWriter outputWriter = new FileWriter(
                    Configuration.EXPORT_OUT_DIR + "/" + tables.get(0).name() + ".json")) {
                JSONUtil.toJsonStr(dataxJsonHelper.getDassConfig(), outputWriter);
            }

            log("[hdfsReader] 生成daas层配置文件成功！！！");

        } else if ("m2h".equals(migrationType)) {  // mysql -> hdfs  (stg layer)

            log("[hdfsWriter] 开始生成stg层配置文件...");

            MysqlHelper mysqlHelper = new MysqlHelper(
                    Configuration.MYSQL_URL,
                    Configuration.MYSQL_DATABASE,
                    Configuration.MYSQL_TABLES);

            DataxJsonHelper dataxJsonHelper = new DataxJsonHelper();

            // Fetch the tables (comma-separated list in configuration).
            List<Table> tables = mysqlHelper.getTables();
            if (tables.isEmpty()) {
                // Avoid IndexOutOfBoundsException on tables.get(0) below.
                log("[hdfsWriter] 未找到任何表，跳过生成");
                return;
            }

            // Fill in the JSON template.
            dataxJsonHelper.setColumns(tables.get(0), migrationType, synchronizeType);
            dataxJsonHelper.setTable(tables.get(0), synchronizeType);

            // BUG FIX: this branch writes to IMPORT_OUT_DIR, but only
            // EXPORT_OUT_DIR was ever created — create it before writing.
            Files.createDirectories(Paths.get(Configuration.IMPORT_OUT_DIR));

            // The split("_")[1] usage implies ARGS looks like "<prefix>_<db>";
            // fail with a clear message instead of ArrayIndexOutOfBoundsException.
            String[] dbParts = mydb.split("_");
            if (dbParts.length < 2) {
                throw new IllegalStateException(
                        "Configuration.ARGS must look like '<prefix>_<db>', got: " + mydb);
            }

            String fileName = "stg_" + dbParts[1] + "_" + tables.get(0).name()
                    + "_" + synchronizeType + ".json";

            // Write the final JSON config; writer is closed on all paths.
            try (FileWriter inputWriter = new FileWriter(
                    Configuration.IMPORT_OUT_DIR + "/" + fileName)) {
                JSONUtil.toJsonStr(dataxJsonHelper.getStgConfig(synchronizeType), inputWriter);
            }

            log("[hdfsWriter] 生成stg层配置文件成功！！！");
        }
    }

    /**
     * Automatically generates the stg-layer CREATE TABLE DDL.
     * <p>TODO: not yet implemented — currently only logs a start message.
     *
     * @throws IOException reserved for the future file-writing implementation
     */
    public static void stgDDL() throws IOException {
        log("开始生成stg层的建表ddl...");
    }

}
