package com.ddone.datax.helper;

import cn.hutool.json.JSONObject;
import cn.hutool.json.JSONUtil;
import com.ddone.datax.bean.DataXTable;
import com.ddone.datax.bean.Mysql2HdfsConfig;
import com.ddone.datax.util.LocalFileReaderUtil;

/**
 * @author ddone
 * @date 2024/5/27-21:07
 */
/**
 * Builds DataX job JSON configurations for MySQL &lt;-&gt; HDFS data migration.
 *
 * <p>Two job templates are loaded from the classpath and then specialised:
 * {@code mysql2hdfs.json} (import: MySQL -&gt; HDFS) and {@code hdfs2mysql.json}
 * (export: HDFS -&gt; MySQL). The constructor fills in the connection settings
 * from {@link Mysql2HdfsConfig}; {@link #setTableAndColumns} customises the
 * per-table parts (table name, columns, HDFS path/file name).
 *
 * <p>Not thread-safe: the two template objects are mutated in place.
 *
 * @author ddone
 * @date 2024/5/27-21:07
 */
public class DataXJsonHelper {

    /** Migration-type token selecting the MySQL -> HDFS (import) direction; anything else means export. */
    private static final String TYPE_IMPORT = "import";

    // JSON paths of the reader/writer parameter objects, shared by both templates.
    private static final String READER_PARA_PATH = "job.content[0].reader.parameter";
    private static final String WRITER_PARA_PATH = "job.content[0].writer.parameter";

    // Import template (MySQL -> HDFS); targets a single-node Hadoop cluster.
    private final JSONObject inputConfig = JSONUtil.parseObj(LocalFileReaderUtil.getResourceFileContent("mysql2hdfs.json"));
    // Export template (HDFS -> MySQL).
    private final JSONObject outputConfig = JSONUtil.parseObj(LocalFileReaderUtil.getResourceFileContent("hdfs2mysql.json"));

    /**
     * Loads both job templates and injects the HDFS endpoint, MySQL credentials
     * and JDBC URLs taken from {@link Mysql2HdfsConfig}.
     */
    public DataXJsonHelper() {
        // Fetch the reader/writer parameter objects of each template.
        JSONObject mysqlReaderPara = inputConfig.getByPath(READER_PARA_PATH, JSONObject.class);
        JSONObject hdfsWriterPara = inputConfig.getByPath(WRITER_PARA_PATH, JSONObject.class);
        JSONObject hdfsReaderPara = outputConfig.getByPath(READER_PARA_PATH, JSONObject.class);
        JSONObject mysqlWriterPara = outputConfig.getByPath(WRITER_PARA_PATH, JSONObject.class);

        // HDFS endpoint and base path for both directions.
        hdfsReaderPara.set("defaultFS", Mysql2HdfsConfig.HDFS_URI);
        hdfsWriterPara.set("defaultFS", Mysql2HdfsConfig.HDFS_URI);
        hdfsReaderPara.set("path", Mysql2HdfsConfig.HDFS_BASE_PATH);
        hdfsWriterPara.set("path", Mysql2HdfsConfig.HDFS_BASE_PATH);

        // MySQL credentials, identical for reader and writer.
        mysqlReaderPara.set("username", Mysql2HdfsConfig.MYSQL_USER);
        mysqlWriterPara.set("username", Mysql2HdfsConfig.MYSQL_USER);
        mysqlReaderPara.set("password", Mysql2HdfsConfig.MYSQL_PASSWORD);
        mysqlWriterPara.set("password", Mysql2HdfsConfig.MYSQL_PASSWORD);

        // JDBC URLs. The asymmetry is intentional: DataX mysqlreader expects
        // "jdbcUrl" to be an array, while mysqlwriter expects a single string.
        mysqlReaderPara.putByPath("connection[0].jdbcUrl[0]", Mysql2HdfsConfig.MYSQL_URL_IMPORT);
        mysqlWriterPara.putByPath("connection[0].jdbcUrl", Mysql2HdfsConfig.MYSQL_URL_EXPORT);

        // Write the parameter objects back into the templates. Defensive:
        // hutool's getByPath normally returns live references, so this is a
        // no-op in practice, but it keeps the code safe if that ever changes.
        inputConfig.putByPath(READER_PARA_PATH, mysqlReaderPara);
        inputConfig.putByPath(WRITER_PARA_PATH, hdfsWriterPara);
        outputConfig.putByPath(READER_PARA_PATH, hdfsReaderPara);
        outputConfig.putByPath(WRITER_PARA_PATH, mysqlWriterPara);
    }

    /**
     * Configures the table name, column lists and HDFS paths for one table.
     *
     * @param table         table metadata (name, column names, column types)
     * @param index         position of the table inside the job's {@code connection[0].table} array
     * @param migrationType {@code "import"} for MySQL -> HDFS, anything else for HDFS -> MySQL
     */
    public void setTableAndColumns(DataXTable table, int index, String migrationType) {
        // Table name (and per-table HDFS path on import).
        setTable(table, index, migrationType);
        // Column lists (and HDFS file name on import).
        setColumns(table, migrationType);
    }

    /**
     * Sets the column lists (and, on import, the hdfswriter file name) for one table.
     *
     * @param table         table metadata supplying column names and types
     * @param migrationType {@code "import"} or export (any other value)
     */
    public void setColumns(DataXTable table, String migrationType) {
        if (TYPE_IMPORT.equals(migrationType)) {
            // hdfswriter output file is named after the table.
            inputConfig.putByPath(WRITER_PARA_PATH + ".fileName", table.name());
            // mysqlreader needs names only; hdfswriter needs names plus types.
            inputConfig.putByPath(READER_PARA_PATH + ".column", table.getColumnNames());
            inputConfig.putByPath(WRITER_PARA_PATH + ".column", table.getColumnNamesAndTypes());
        } else {
            // NOTE(review): the export path only sets the mysqlwriter columns;
            // the hdfsreader column list is never touched — confirm the
            // hdfs2mysql.json template already carries a usable reader column spec.
            outputConfig.putByPath(WRITER_PARA_PATH + ".column", table.getColumnNames());
        }
    }

    /**
     * Sets the table name at the given index (and, on import, the per-table HDFS path).
     *
     * @param table         table metadata supplying the table name
     * @param index         position inside the job's {@code connection[0].table} array
     * @param migrationType {@code "import"} or export (any other value)
     */
    public void setTable(DataXTable table, int index, String migrationType) {
        if (TYPE_IMPORT.equals(migrationType)) {
            inputConfig.putByPath(READER_PARA_PATH + ".connection[0].table[" + index + "]", table.name());
            // Each table is written under its own sub-directory of the base path.
            inputConfig.putByPath(WRITER_PARA_PATH + ".path", Mysql2HdfsConfig.HDFS_BASE_PATH + "/" + table.name());
        } else {
            outputConfig.putByPath(WRITER_PARA_PATH + ".connection[0].table[" + index + "]", table.name());
        }
    }

    /** @return the fully-configured MySQL -> HDFS (import) job JSON */
    public JSONObject getInputConfig() {
        return inputConfig;
    }

    /** @return the fully-configured HDFS -> MySQL (export) job JSON */
    public JSONObject getOutputConfig() {
        return outputConfig;
    }

    /** Manual smoke test: prints the raw import template from the classpath. */
    public static void main(String[] args) {
        System.out.println(LocalFileReaderUtil.getResourceFileContent("mysql2hdfs.json"));
    }
}
