package org.danan.spark2hudi.app;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.*;
import org.danan.spark2hudi.utils.DateFormatUtil;
import org.danan.spark2hudi.utils.JDBCUtil;

import java.sql.Connection;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.List;


/**
 * Created with IntelliJ IDEA.
 *
 * @Author: NanHuang
 * @Date: 2023/07/06/18:09
 * @Description: 采集Hudi数据，写入Doris
 */
public class Hudi2Doris {

    /** Number of INSERT statements accumulated before each batch execute + commit. */
    private static final int BATCH_SIZE = 1000;

    /**
     * Entry point: reads the HRDiagnose Hudi table from HDFS, builds one INSERT
     * statement per row, and batch-writes them into the Doris table
     * {@code dwd.DWD_MHS_IPT_RECORD}.
     *
     * @param args unused command-line arguments
     * @throws SQLException if any JDBC operation against Doris fails
     */
    public static void main(String[] args) throws SQLException {
        // 1. Build the Spark configuration (master is supplied by spark-submit).
        SparkConf conf = new SparkConf()
                .setAppName("Hudi2Database")
                .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
                .set("spark.sql.legacy.timeParserPolicy", "LEGACY")
                .set("spark.sql.parquet.datetimeRebaseModeInRead", "CORRECTED")
                .set("spark.kryoserializer.buffer.max", "512m");

        SparkSession spark = SparkSession.builder().config(conf).getOrCreate();
        JavaSparkContext jsc = new JavaSparkContext(spark.sparkContext());

        System.out.println("开始时间为：" + DateFormatUtil.toYmdHms(System.currentTimeMillis()));

        // 2. Read the Hudi table.
        Dataset<Row> ds = spark.read().format("hudi")
                .load("hdfs://hadoop56:8020/etl_test/sxyc/emr/HRDiagnose");
        // NOTE(review): collect() pulls the entire table onto the driver and can
        // OOM for large tables — kept as in the original design; consider
        // foreachPartition-based writes if volume grows.
        List<Row> rows = ds.javaRDD().collect();

        // 3. Build the INSERT statements and batch-write them into Doris.
        List<String> sqls = buildInsertSqls(rows);
        writeToDoris(sqls);

        // 4. Shut down the Spark environment.
        spark.close();
        jsc.close();

        System.out.println("结束时间为：" + DateFormatUtil.toYmdHms(System.currentTimeMillis()));
    }

    /**
     * Builds one INSERT statement per source row.
     *
     * <p>Column mapping mirrors the original logic: 50 value slots are emitted,
     * the first taken from row position 5, the remaining 49 hard-coded to the
     * placeholder timestamp {@code '2000-01-01 00:00:00'}.
     * NOTE(review): the hard-coded placeholders look like leftover test
     * scaffolding — confirm against the intended column mapping.
     *
     * @param rows collected Hudi rows
     * @return one fully-formed INSERT statement per input row
     */
    private static List<String> buildInsertSqls(List<Row> rows) {
        String template = "insert into dwd.DWD_MHS_IPT_RECORD values(%s)";
        List<String> sqls = new ArrayList<>(rows.size());
        for (Row row : rows) {
            List<String> values = new ArrayList<>(50);
            for (int i = 0, j = 5; i < 50; i++, j++) {
                if (i == 0) {
                    Object raw = row.get(j);
                    // BUGFIX: the original concatenated raw values directly, so any
                    // value containing a single quote produced malformed SQL (and an
                    // injection vector). Escape ' as '' per SQL string-literal rules.
                    String escaped = raw == null ? "" : raw.toString().replace("'", "''");
                    values.add(String.format("'%s'", escaped));
                } else {
                    values.add("'2000-01-01 00:00:00'");
                }
            }
            sqls.add(String.format(template, String.join(",", values)));
        }
        return sqls;
    }

    /**
     * Executes the given INSERT statements against Doris in batches of
     * {@link #BATCH_SIZE}, committing after each batch and once more for the
     * final partial batch.
     *
     * @param sqls INSERT statements to execute
     * @throws SQLException if any JDBC operation fails
     */
    private static void writeToDoris(List<String> sqls) throws SQLException {
        // BUGFIX: the original never closed the Statement and leaked the
        // Connection on any exception path; try-with-resources guarantees both
        // are closed.
        try (Connection dorisConn = JDBCUtil.getDorisConnection();
             Statement statement = dorisConn.createStatement()) {
            dorisConn.setAutoCommit(false);
            int counter = 0;
            for (String sql : sqls) {
                statement.addBatch(sql);
                counter++;
                if (counter % BATCH_SIZE == 0) {
                    statement.executeBatch();
                    dorisConn.commit();
                    statement.clearBatch();
                    System.out.println("共插入数据 ： " + counter);
                }
            }
            // Flush the final partial batch.
            statement.executeBatch();
            dorisConn.commit();
            // BUGFIX: the original printed the total BEFORE the final
            // executeBatch/commit; report only after the data is actually in.
            System.out.println("共插入数据 ： " + counter);
        }
    }
}
