package com;

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.SqlDialect;
import org.apache.flink.table.api.StatementSet;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.table.catalog.hive.HiveCatalog;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;

/**
 * @Description: TODO QQ1667847363
 * @author: xiao kun tai
 * @date:2022/2/11 8:55
 * <p>
 * <p>
 * TODO FLinkSQl 批处理   流处理
 * <p>
 * TODO Mysql 到  Hive
 */
/**
 * Flink SQL job that copies rows from a MySQL table into a Hive table.
 *
 * <p>All settings are read from a Java properties file whose path is passed as
 * the single program argument: Flink cluster address and parallelism, jars to
 * ship to the cluster, MySQL connection/table info, Hive catalog info, a row
 * filter ({@code mysql.table.condition}) and an overwrite flag
 * ({@code hive.table.overwrite}).
 *
 * <p>The job registers the MySQL table via the (legacy) JDBC connector DDL,
 * selects the matching rows, switches to the Hive catalog/dialect, and runs a
 * single INSERT (optionally INSERT OVERWRITE) into the Hive table.
 */
public class MysqlToHiveSqlProp {

    public static void main(String[] args) throws Exception {
        if (args.length < 1) {
            // Fail fast with a usage hint instead of an ArrayIndexOutOfBoundsException.
            throw new IllegalArgumentException("Usage: MysqlToHiveSqlProp <properties-file>");
        }
        Properties properties = loadProperties(args[0]);

        // --- Remote Flink cluster settings ---
        String host = properties.getProperty("flink.host");
        int port = Integer.parseInt(properties.getProperty("flink.port"));
        int parallelism = Integer.parseInt(properties.getProperty("flink.parallelism"));

        // Jars shipped to the cluster: one entry per comma-separated name, prefixed with jar.path.
        String jarPath = properties.getProperty("jar.path");
        List<String> jars = new ArrayList<>();
        for (String jarName : properties.getProperty("jar.names").split(",")) {
            jars.add(jarPath + jarName);
        }
        String[] jarFiles = jars.toArray(new String[0]);

        // --- MySQL source settings ---
        String mysqlUrl = properties.getProperty("mysql.url");
        String mysqlDriver = properties.getProperty("mysql.driver");
        String mysqlUser = properties.getProperty("mysql.user");
        String mysqlTable = properties.getProperty("mysql.table");
        String mysqlPassword = properties.getProperty("mysql.password");
        String mysqlColumnsDesc = properties.getProperty("mysql.table.columns.desc");
        String condition = properties.getProperty("mysql.table.condition");

        // --- Hive sink settings ---
        String hadoopUser = properties.getProperty("hadoop.user");
        String defaultDatabase = properties.getProperty("hive.database");
        String hiveTable = properties.getProperty("hive.table");
        String hiveConfDir = properties.getProperty("hive.conf.dir");
        // Case-insensitive "true" -> true; anything else (including a missing key) -> false.
        boolean overwrite = Boolean.parseBoolean(properties.getProperty("hive.table.overwrite"));

        // Table environment on the remote cluster, Blink planner, streaming mode.
        StreamExecutionEnvironment env =
                StreamExecutionEnvironment.createRemoteEnvironment(host, port, parallelism, jarFiles);
        EnvironmentSettings settings =
                EnvironmentSettings.newInstance().useBlinkPlanner().inStreamingMode().build();
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env, settings);

        // Register the MySQL table through the legacy JDBC connector DDL. The Flink-side
        // table name gets a "_mysql_flink_hive" suffix to avoid clashing with other tables.
        String flinkSourceTable = mysqlTable + "_mysql_flink_hive";
        String ddl = String.format(
                "CREATE TABLE %s (%s) " +
                        "WITH (" +
                        "'connector.type' = 'jdbc'," +
                        "'connector.url' = '%s'," +
                        "'connector.driver' = '%s'," +
                        "'connector.table' = '%s'," +
                        " 'connector.username' = '%s'," +
                        " 'connector.password' = '%s'" +
                        " )",
                flinkSourceTable,
                mysqlColumnsDesc,
                mysqlUrl,
                mysqlDriver,
                mysqlTable,
                mysqlUser,
                mysqlPassword);
        tableEnv.executeSql(ddl);

        // Select the rows to migrate. NOTE(review): the condition is concatenated into SQL;
        // it comes from the trusted job properties file, not from untrusted user input.
        Table dataTable =
                tableEnv.sqlQuery("select * from " + flinkSourceTable + " where " + condition);

        // Switch to the Hive catalog so the insert target resolves against Hive metadata,
        // and use the Hive SQL dialect for the write.
        String hiveCatalogName = "hive-test";
        System.setProperty("HADOOP_USER_NAME", hadoopUser);
        HiveCatalog hive = new HiveCatalog(hiveCatalogName, defaultDatabase, hiveConfDir);
        tableEnv.registerCatalog(hiveCatalogName, hive);
        tableEnv.useCatalog(hiveCatalogName);
        tableEnv.getConfig().setSqlDialect(SqlDialect.HIVE);

        // Insert (or overwrite, per hive.table.overwrite) the selected rows into Hive.
        StatementSet statementSet = tableEnv.createStatementSet();
        statementSet.addInsert(hiveTable, dataTable, overwrite);
        statementSet.execute();
    }

    /**
     * Loads the job configuration from a UTF-8 properties file.
     *
     * <p>Uses try-with-resources so the reader is always closed (the previous
     * implementation leaked it), and names the charset explicitly instead of
     * relying on the platform default.
     *
     * @param path path to the properties file
     * @return the loaded properties
     * @throws IOException if the file cannot be read
     */
    private static Properties loadProperties(String path) throws IOException {
        Properties properties = new Properties();
        try (BufferedReader reader =
                Files.newBufferedReader(Paths.get(path), StandardCharsets.UTF_8)) {
            properties.load(reader);
        }
        return properties;
    }
}
