package com.wyt.spark.hive;

import org.apache.commons.lang3.StringUtils;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.types.StructField;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;
import java.util.*;

/**
 * @Description:
 * @Author wangyongtao
 * @Date 2021/11/18 10:21
 **/
public class HiveUtils {

    /**
     * Queries Hive directly through the HiveServer2 JDBC driver and prints the
     * first column of up to 10 rows from the {@code action} table.
     *
     * <p>Fixes over the original: all JDBC resources are closed via
     * try-with-resources (they previously leaked), the dead null-check is gone
     * ({@link DriverManager#getConnection} throws on failure, it never returns
     * {@code null}), the unused counter variable is removed, and a failure no
     * longer calls {@code System.exit(1)} from inside a utility method.
     *
     * @param sparkSession unused; kept so the existing signature stays stable
     */
    public static void queryByHiveDriver(SparkSession sparkSession) {
        String driverName = "org.apache.hive.jdbc.HiveDriver";
        try {
            Class.forName(driverName);
            // The last two arguments are the user name and password.
            try (Connection con = DriverManager.getConnection(
                         "jdbc:hive2://172.172.178.173:7001/default", "hadoop", "hadoop");
                 Statement stmt = con.createStatement()) {
                String sql = "SELECT * FROM action limit 10";
                System.out.println("Running: " + sql);
                try (ResultSet res = stmt.executeQuery(sql)) {
                    while (res.next()) {
                        System.out.println(res.getString(1));
                    }
                }
            }
        } catch (Exception e) {
            // Report and return; do not kill the JVM (previously System.exit(1)).
            e.printStackTrace();
        }
    }

    /**
     * Reads the {@code k_olap_model} table from MySQL through Spark's JDBC
     * source, registers it as the temp view {@code temp}, then selects and
     * shows two columns.
     *
     * <p>The original chained {@code .format("mysql")} before {@code .jdbc(...)};
     * {@code DataFrameReader.jdbc} always uses the JDBC source, so the
     * misleading format call is dropped. The redundant {@code toDF()} on an
     * already-typed {@code Dataset<Row>} is removed as well.
     */
    public static void queryFromMysqlBySpark(SparkSession sparkSession) {
        String url = "jdbc:mysql://172.21.205.134:3306/kolap_webserver";
        String table = "k_olap_model";

        Properties prop = new Properties();
        prop.put("user", "root");
        prop.put("password", "root");

        Dataset<Row> dataset = sparkSession.read().jdbc(url, table, prop);
        dataset.createOrReplaceTempView("temp");

        Dataset<Row> dataset2 = sparkSession.sql("select model_name,model_status from temp");
        dataset2.show();
    }

    /**
     * Copies the full contents of the Hive table
     * {@code gdm.gdm_ggycode_kolap_partition_fact_de} into a MySQL table of
     * the same name via Spark's JDBC writer, appending rows.
     *
     * <p>NOTE(review): despite the name, data flows Hive &rarr; MySQL here
     * (the Hive query result is written to the MySQL url). Renaming would be
     * clearer but is left alone to keep the public interface stable.
     */
    public static void insertHiveFromMysqlBySpark(SparkSession sparkSession) {
        String db = "gdm";
        sparkSession.sql("use " + db);

        String sql = "select * from gdm_ggycode_kolap_partition_fact_de";
        String url = "jdbc:mysql://172.21.15.88:3306/wyt_test_db1";
        String table = "wyt_test_db1.gdm_ggycode_kolap_partition_fact_de";

        Properties properties = new Properties();
        properties.put("driver", "com.mysql.jdbc.Driver");
        properties.put("user", "root");
        properties.put("password", "123456");

        sparkSession.sql(sql).write()
                .mode(SaveMode.Append)
                .jdbc(url, table, properties);
    }

    /**
     * Inserts a small fixed set of rows (including deliberate nulls in each
     * column) into the partitioned Hive table
     * {@code temp.test_null_to_default}, partition {@code dt='2022-05-09'}.
     *
     * <p>The original also generated a 10,000-row random array
     * ({@code bigDataValues}) that was never used anywhere; that dead work is
     * removed.
     */
    public static void insertHive(SparkSession sparkSession) {
        String dbTableName = "temp.test_null_to_default";
        // {partition column name, partition value}
        Object[] partitionInfo = new Object[]{"dt", "2022-05-09"};
        String[] fields = null; // column list is currently unused downstream

        // One null per column to exercise null handling on insert.
        Object[][] values = new Object[][]{
                {"name1", 220.05, 1000, true},
                {null, 220.05, 1000, true},
                {"name3", null, 1000, true},
                {"name4", 220.05, null, true},
                {"name5", 220.05, 1000, null},
        };

        insertHiveFromHiveBySpark(sparkSession, dbTableName, partitionInfo, fields, values);
    }

    /**
     * Copies {@code temp.test_null_to_default} into
     * {@code temp.test_null_to_default2}, replacing nulls with per-type
     * defaults ({@code ''}, {@code 0}, {@code false}) via {@code COALESCE},
     * overwriting the target.
     */
    public static void copyToHiveFromHiveBySpark(SparkSession sparkSession) {

        String fromSql = "select COALESCE(name,''),COALESCE(price,0),COALESCE(num,0),COALESCE(flag,false),dt from temp.test_null_to_default";

        String targetTableName = "temp.test_null_to_default2";

        sparkSession.sql(fromSql).write().mode(SaveMode.Overwrite).insertInto(targetTableName);

    }

    /**
     * Builds and executes an {@code INSERT INTO ... [PARTITION(...)] VALUES ...}
     * statement for the given rows. String values (and a String partition
     * value) are single-quoted; nulls are emitted as SQL {@code null}.
     *
     * <p>Fixes over the original: the inner loop used {@code values[0].length}
     * instead of {@code values[i].length} (wrong for jagged row arrays); the
     * odd {@code x.getClass().isInstance("")} String test (true for any
     * {@code Object}-classed value, and an NPE for a null partition value) is
     * replaced with a null-safe {@code instanceof String}; {@code StringBuffer}
     * becomes {@code StringBuilder} (no cross-thread sharing here); and an
     * empty {@code values} array no longer corrupts the statement via
     * {@code deleteCharAt} — it is a no-op.
     *
     * <p>WARNING(review): the SQL is assembled by string concatenation, so
     * values containing quotes are not escaped. Fine for trusted test data;
     * do not feed it untrusted input.
     *
     * @param dbTableName   fully qualified target table, e.g. {@code db.table}
     * @param partitionInfo {@code {partitionColumn, partitionValue}} or null/empty for none
     * @param fields        currently unused (reserved for an explicit column list)
     * @param values        rows to insert; each inner array is one row
     */
    private static void insertHiveFromHiveBySpark(SparkSession sparkSession, String dbTableName, Object[] partitionInfo, String[] fields, Object[][] values) {
        System.setProperty("HADOOP_USER_NAME", "hive");

        if (values == null || values.length == 0) {
            // Nothing to insert; the original would have emitted broken SQL.
            return;
        }

        StringBuilder insertSql = new StringBuilder();
        insertSql.append("insert into table ").append(dbTableName);

        if (partitionInfo != null && partitionInfo.length > 0) {
            insertSql.append(" partition(").append(partitionInfo[0]).append("=");
            if (partitionInfo[1] instanceof String) {
                insertSql.append("'").append(partitionInfo[1]).append("'");
            } else {
                insertSql.append(partitionInfo[1]);
            }
            insertSql.append(") ");
        }

        insertSql.append(" values ");

        for (int i = 0; i < values.length; i++) {
            insertSql.append(" (");
            for (int j = 0; j < values[i].length; j++) {
                if (values[i][j] == null) {
                    insertSql.append("null");
                } else if (values[i][j] instanceof String) {
                    insertSql.append("'").append(values[i][j]).append("'");
                } else {
                    insertSql.append(values[i][j]);
                }
                insertSql.append(",");
            }
            // Drop the trailing comma after the last column of this row.
            insertSql.deleteCharAt(insertSql.length() - 1);
            insertSql.append(")").append(",");
        }
        // Drop the trailing comma after the last row tuple.
        insertSql.deleteCharAt(insertSql.length() - 1);

        System.out.println(insertSql.toString());

        sparkSession.sql(insertSql.toString());
    }

    /** Local demo entry point; now stops the session so the app exits cleanly. */
    public static void main(String[] args) {
        SparkSession sparkSession = SparkSession.builder()
                .appName("hzw02demos")
                .master("local")
                .enableHiveSupport()
                .getOrCreate();

        try {
            copyToHiveFromHiveBySpark(sparkSession);
        } finally {
            sparkSession.stop();
        }
    }

}
