package org.danan.spark2hudi.app;

import org.apache.spark.SparkConf;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;
import org.danan.spark2hudi.utils.DateFormatUtil;
import org.danan.spark2hudi.bean.DatabaseConfig;
import org.danan.spark2hudi.utils.ConfigUtil;

import java.util.Properties;

import static org.apache.hudi.QuickstartUtils.getQuickstartWriteConfigs;

/**
 * Ingests a single RDBMS table into a Hudi table on HDFS.
 *
 * <p>Usage: {@code Database2Hudi <hospital-abbrev>_<database> <table> <key>}
 * e.g. {@code sxyc_his PATIENT_INFO pk}. The database argument selects the
 * JDBC connection settings, the HDFS output sub-path, and the Hudi table-name
 * prefix; the key argument is used as both the Hudi record key and the
 * precombine field.
 *
 * @Author: NanHuang
 * @Date: 2023/07/05/14:29
 * @Description: Collects database data and writes it into Hudi.
 */
public class Database2Hudi {

    // JDBC driver class names.
    // NOTE(review): "com.mysql.jdbc.Driver" is the legacy Connector/J 5.x class;
    // Connector/J 8+ renamed it to "com.mysql.cj.jdbc.Driver" — confirm which
    // connector jar is on the classpath before changing.
    private static final String MYSQL_DRIVER = "com.mysql.jdbc.Driver";
    private static final String SQLSERVER_DRIVER = "com.microsoft.sqlserver.jdbc.SQLServerDriver";
    // Fixed: was "oracle.jdbc.OracleDrive" (missing trailing 'r'), which would
    // throw ClassNotFoundException for any Oracle source.
    private static final String ORACLE_DRIVER = "oracle.jdbc.OracleDriver";

    public static void main(String[] args) {
        System.setProperty("HADOOP_USER_NAME","danan");
        // 1. Validate arguments (format: hospitalAbbrev_databaseName tableName keyName)
        if (args.length != 3){
            throw new RuntimeException("Please set the correct parameters！ [ 1.database 2.table 3.key ]");
        }
        String databaseName = args[0].toLowerCase();
        String tableName = args[1];
        String keyName = args[2];
        String hdfsPath = null;
        String namePrefix = null;

        // 2. Resolve per-source configuration: JDBC connection settings,
        //    HDFS output sub-path, and Hudi table-name prefix.
        DatabaseConfig databaseConfig = null;
        switch (databaseName){
            case "sxyc_his":
                databaseConfig = ConfigUtil.getSxycHisConfig();
                hdfsPath = "sxyc/his";
                namePrefix = "SXYC_";
                break;
            case "sxyc_emr":
                databaseConfig = ConfigUtil.getSxycEmrConfig();
                hdfsPath = "sxyc/emr";
                namePrefix = "SXYC_";
                break;
            case "scyy_his/emr":
                databaseConfig = ConfigUtil.getScyyHisEmrConfig();
                hdfsPath = "scyy/his_emr";
                namePrefix = "SCYY_";
                break;
            case "scyy_pacs":
                databaseConfig = ConfigUtil.getScyyPacsConfig();
                hdfsPath = "scyy/pacs";
                namePrefix = "SCYY_";
                break;
            case "szgc_his/emr":
                databaseConfig = ConfigUtil.getSzgcHisEmrConfig();
                hdfsPath = "szgc/his_emr";
                namePrefix = "SZGC_";
                break;
            case "szgc_pacs":
                databaseConfig = ConfigUtil.getSzgcPacsConfig();
                hdfsPath = "szgc/pacs";
                namePrefix = "SZGC_";
                break;
            case "test":
                databaseConfig = ConfigUtil.getTestConfig();
                hdfsPath = "test";
                namePrefix = "TEST_";
                break;
            default:
                throw new RuntimeException("Please set the correct name of database！ [ example: sxyc_his ]");
        }

        // 3. Build the Spark configuration.
        //    NOTE(review): master is hardcoded to local[2] — for cluster runs this
        //    should come from spark-submit instead of being set in code.
        SparkConf conf = new SparkConf()
                .setMaster("local[2]")
                .setAppName("Database2Hudi")
                .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
                .set("spark.sql.legacy.timeParserPolicy", "LEGACY");

        try (SparkSession spark = SparkSession.builder().config(conf).getOrCreate()) {

            // 4. Read the source table over JDBC.
            System.out.println("开始时间为：" + DateFormatUtil.toYmdHms(System.currentTimeMillis()));

            Properties properties = getProperties(databaseConfig);

            // NOTE(review): the orderBy/where below hardcode a "pk" column and two
            // specific key values — this looks like debugging leftover that limits
            // the ingest to two rows and forces a full-table sort over JDBC.
            // Kept as-is to preserve behavior; confirm and remove before production.
            Dataset<Row> ds = spark.read()
                    .jdbc(databaseConfig.getUrl(), tableName, properties)
                    .orderBy("pk")
                    .where("pk in (87852579,87852580)");

            // 5. Write into Hudi (COPY_ON_WRITE); keyName serves as both the
            //    record key and the precombine (dedup) field.
            ds.write().format("hudi")
                    .options(getQuickstartWriteConfigs())
                    .option("hoodie.datasource.write.table.type","COPY_ON_WRITE")
                    .option("hoodie.table.name",namePrefix + tableName)
                    .option("hoodie.datasource.write.precombine.field",keyName)
                    .option("hoodie.datasource.write.recordkey.field",keyName)
                    .mode(SaveMode.Append)
                    .save(String.format("hdfs://hadoop102:8020/etl_test/%s/%s",hdfsPath,tableName));

            System.out.println("结束时间为：" + DateFormatUtil.toYmdHms(System.currentTimeMillis()));
        }
    }

    /**
     * Builds the JDBC connection {@link Properties} (user, password, driver class)
     * for the given source database.
     *
     * @param databaseConfig connection settings for the source database
     * @return JDBC properties ready to pass to {@code spark.read().jdbc(...)}
     * @throws RuntimeException if the database type is not mysql/oracle/sqlserver
     */
    private static Properties getProperties(DatabaseConfig databaseConfig) {
        Properties properties = new Properties();
        properties.setProperty("user",databaseConfig.getUser());
        properties.setProperty("password",databaseConfig.getPassword());
        switch (databaseConfig.getType()){
            case "mysql":
                properties.setProperty("driver",MYSQL_DRIVER);
                break;
            case "oracle":
                properties.setProperty("driver",ORACLE_DRIVER);
                break;
            case "sqlserver":
                properties.setProperty("driver",SQLSERVER_DRIVER);
                break;
            default:
                throw new RuntimeException("This database is not supported!");
        }
        return properties;
    }
}
