package com;

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.SqlDialect;
import org.apache.flink.table.api.StatementSet;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.table.catalog.hive.HiveCatalog;

/**
 * MySQL-to-Hive data transfer job built on the Flink SQL Table API
 * (Flink SQL supports both batch and streaming execution modes; this
 * job uses the streaming environment).
 *
 * @author xiao kun tai (QQ 1667847363)
 * @date 2022/2/11 8:55
 */
/**
 * Reads a full table from MySQL (registered via the Flink JDBC connector)
 * and inserts it into a Hive table resolved through a {@link HiveCatalog},
 * then prints the elapsed wall-clock time.
 */
public class MysqlToHiveSql {

    public static void main(String[] args) throws Exception {
        // Streaming Table API environment. Parallelism 1 keeps the job simple;
        // raise it for larger tables.
        // (Batch alternative: EnvironmentSettings.inBatchMode() + TableEnvironment.create.)
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // Hive/HDFS access runs as "root" — NOTE(review): confirm this matches
        // the cluster's security configuration.
        System.setProperty("HADOOP_USER_NAME", "root");

        long startTime = System.currentTimeMillis();

        // Source table schema and name in MySQL.
        // Alternative sources used during testing:
        //   "id INT ,name STRING, age INT,money DOUBLE,todate DATE,ts TIMESTAMP" / "mysql_hive"
        //   "wlorddate timestamp,patdept string,fee double,wlorddatemonth string,wlorddateyear int"
        //       / "workloadfeetestdaydatatable"
        String columns = "wlorddate timestamp, wladmdate date, wladmtype string, wlarccat string, wlpatdep string, wlpatdoc string, fee double,arcqty double,wlorddatemonth string, wlorddateyear int";
        String mysql_source_table = "workloadfeetest2daydatatable";

        // DDL registering the MySQL table with the (legacy) JDBC connector.
        // NOTE(review): credentials are hard-coded — move them to configuration
        // or environment variables before deploying.
        // NOTE(review): 'com.mysql.jdbc.Driver' is deprecated in Connector/J 8+
        // in favour of 'com.mysql.cj.jdbc.Driver' — confirm the driver version in use.
        String flink_source_table = "flink_mysql_hive";
        String base_sql = "CREATE TABLE %s (%s) " +
                "WITH (" +
                "'connector.type' = 'jdbc'," +
                "'connector.url' = 'jdbc:mysql://localhost:3306/test'," +
                "'connector.driver' = 'com.mysql.jdbc.Driver'," +
                "'connector.table' = '%s'," +
                " 'connector.username' = 'root'," +
                " 'connector.password' = 'A'" +
                " )";
        String source_ddl = String.format(base_sql, flink_source_table, columns, mysql_source_table);
        tableEnv.executeSql(source_ddl);

        // Select the whole source table.
        Table dataTable = tableEnv.sqlQuery("select * from " + flink_source_table);

        // Switch the table environment to the Hive catalog so the insert target
        // resolves against Hive metadata, using the Hive SQL dialect.
        String name = "hive-test";
        String defaultDatabase = "test";
        String hiveConfDir = "Flink-1.12.1/src/main/resources";
        HiveCatalog hive = new HiveCatalog(name, defaultDatabase, hiveConfDir);
        tableEnv.registerCatalog(name, hive);
        tableEnv.useCatalog(name);
        tableEnv.getConfig().setSqlDialect(SqlDialect.HIVE);
        tableEnv.useDatabase("test");

        // Append the MySQL data into the Hive table.
        // Use addInsert(target, dataTable, true) to overwrite instead of append.
        StatementSet statementSet = tableEnv.createStatementSet();
        statementSet.addInsert("workloadfeetest2daydatatable", dataTable);
        statementSet.execute();
        // NOTE(review): StatementSet.execute() submits the job asynchronously
        // and returns a TableResult; the timing below may not cover the full
        // job run. Consider TableResult#await() if end-to-end time is wanted.

        long endTime = System.currentTimeMillis();
        System.out.println("花费时间：" + formatElapsed(endTime - startTime));
    }

    /**
     * Formats an elapsed duration as "{d}d{h}h{m}m{s}s{ms}ms", omitting leading
     * units that are zero; the millisecond part is always printed. Replicates
     * the output of the original inline nested-ternary expression exactly.
     *
     * @param tempTime elapsed time in milliseconds (expected non-negative)
     * @return human-readable duration string, e.g. "1m5s230ms"
     */
    private static String formatElapsed(long tempTime) {
        long days = tempTime / 86400000L;
        long hours = tempTime % 86400000L / 3600000L;
        long minutes = tempTime % 3600000L / 60000L;
        long seconds = tempTime % 60000L / 1000L;
        StringBuilder sb = new StringBuilder();
        if (days > 0) {
            sb.append(days).append('d');
        }
        // Each lower unit is printed once any higher unit was non-zero, so
        // interior zeros (e.g. "1d0h5m...") are kept — same as the original.
        if (days > 0 || hours > 0) {
            sb.append(hours).append('h');
        }
        if (tempTime / 3600000L > 0 || minutes > 0) {
            sb.append(minutes).append('m');
        }
        if (tempTime / 60000L > 0 || seconds > 0) {
            sb.append(seconds).append('s');
        }
        sb.append(tempTime % 1000L).append("ms");
        return sb.toString();
    }
}
