package com;

import org.apache.flink.table.api.*;
import org.apache.flink.table.catalog.hive.HiveCatalog;

import java.io.File;

/**
 * Flink SQL job (batch/stream): copies a full table from MySQL into Hive.
 * <p>
 * Contact: QQ 1667847363
 *
 * @author xiao kun tai
 * @date 2022/2/11 8:55
 */
public class MysqlToHiveSqlTest {

    /**
     * Entry point: registers a JDBC (MySQL) source table in Flink's default
     * catalog, then switches to a HiveCatalog and inserts the full source
     * table into the identically named Hive table.
     *
     * @param args unused
     * @throws Exception if the Flink job submission or execution fails
     */
    public static void main(String[] args) throws Exception {

        // Flink Table API environment using the Blink planner.
        EnvironmentSettings settings = EnvironmentSettings.newInstance().useBlinkPlanner().build();
        TableEnvironment tableEnv = TableEnvironment.create(settings);
        // Hive/HDFS writes are performed as this Hadoop user.
        System.setProperty("HADOOP_USER_NAME", "root");

        /*
         * Source table to copy. Alternative definitions kept for reference:
         *   mysql_hive_test0: "id INT ,name STRING, age INT,money DOUBLE,todate DATE,ts TIMESTAMP"
         *   mysql_hive_test2: "wlorddate timestamp,patdept string,fee double,wlorddatemonth string,wlorddateyear int"
         */
        String columns = "wlorddate timestamp, wladmdate date, wladmtype string, wlarccat string, wlpatdep string, wlpatdoc string, fee double,arcqty double,wlorddatemonth string, wlorddateyear int";
        String mysqlSourceTable = "mysql_hive_test1";

        // DDL for the Flink-side JDBC source table (legacy 'connector.type' options).
        // FIX: the original URL ended in "serverTimezone=GMT&2B8" — a mangled
        // URL-encoding of "GMT+8"; "&2B8" would be read as a bogus extra query
        // parameter, leaving serverTimezone as plain "GMT". The correct
        // percent-encoding is "GMT%2B8".
        // NOTE(review): credentials are hard-coded in source; move them to
        // configuration or a secrets store before using outside a test setup.
        String flinkSourceTable = "flink_0_" + mysqlSourceTable;
        String baseSql = "CREATE TABLE %s (%s) " +
                "WITH (" +
                "'connector.type' = 'jdbc'," +
                "'connector.url' = 'jdbc:mysql://10.152.160.66:59923/mdrbase?useUnicode=true&characterEncoding=utf8&zeroDateTimeBehavior=convertToNull&useSSL=true&serverTimezone=GMT%2B8'," +
                "'connector.driver' = 'com.mysql.cj.jdbc.Driver'," +
                "'connector.table' = '%s'," +
                " 'connector.username' = 'db_admin'," +
                " 'connector.password' = 'UQ8r=M=50P=C'" +
                " )";
        String sourceDdl = String.format(baseSql, flinkSourceTable, columns, mysqlSourceTable);
        tableEnv.executeSql(sourceDdl);

        // Full-table scan of the MySQL source.
        Table dataTable = tableEnv.sqlQuery("select * from " + flinkSourceTable);

        // Switch to a Hive catalog so the INSERT below targets the Hive table.
        String name = "hive-test";
        String defaultDatabase = "mdrbase";

        // FIX: getResource("").toString() returns a "file:/..." URL string, but
        // HiveCatalog expects a plain filesystem directory (the one containing
        // hive-site.xml); getPath() strips the URL scheme. TODO confirm against
        // the deployed classpath layout.
        // Alternative: System.getProperty("user.dir") + File.separator + "conf"
        String hiveConfDir = MysqlToHiveSqlTest.class.getClassLoader().getResource("").getPath();

        HiveCatalog hive = new HiveCatalog(name, defaultDatabase, hiveConfDir);
        tableEnv.registerCatalog(name, hive);
        tableEnv.useCatalog(name);
        // Hive dialect so the insert resolves against Hive table semantics.
        tableEnv.getConfig().setSqlDialect(SqlDialect.HIVE);
        tableEnv.useDatabase(defaultDatabase);

        // Append the MySQL data into the Hive table of the same name.
        // To overwrite existing data instead, pass true as the third argument:
        //   statementSet.addInsert(mysqlSourceTable, dataTable, true);
        StatementSet statementSet = tableEnv.createStatementSet();
        statementSet.addInsert(mysqlSourceTable, dataTable);
        statementSet.execute();
    }
}
