package com.example.two;

import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.connector.jdbc.internal.options.JdbcOptions;
import org.apache.flink.connector.jdbc.table.JdbcTableSource;
import org.apache.flink.streaming.api.TimeCharacteristic;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.SqlDialect;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.table.catalog.hive.HiveCatalog;

/**
 * Demo Flink streaming job that registers external catalogs (Hive and a JDBC/MySQL
 * source) with a {@code StreamTableEnvironment} and runs simple SQL pipelines
 * against them.
 *
 * @author coxx_
 * @date 2020/10/30 16:08
 */
public class Job {

    /**
     * Registers a {@link HiveCatalog} on the given table environment and writes the
     * supplied stream into a partitioned Hive table named {@code fs_table}.
     *
     * @param tEnv       the table environment to register the catalog on
     * @param dataSource the stream to expose as the temporary view {@code users};
     *                   its columns must match the SELECT below
     */
    public static void createHiveCatalog(StreamTableEnvironment tEnv, DataStream<?> dataSource){
        // Unique name identifying the Hive catalog inside this table environment.
        String name = "myhive";
        // Default database to use within the catalog.
        String defaultDatabase = "default";
        // Directory containing hive-site.xml.
        String hiveConfDir = "/export/servers/nc/hive/conf/";
        // Hive version the catalog should talk to.
        String version = "2.3.6";
        HiveCatalog hive = new HiveCatalog(name, defaultDatabase, hiveConfDir, version);
        tEnv.registerCatalog(name, hive);
        // Make the Hive catalog the current catalog so unqualified names resolve into it.
        tEnv.useCatalog(name);
        // Switch to the Hive SQL dialect so Hive DDL (partitioned/stored-as) is accepted.
        tEnv.getConfig().setSqlDialect(SqlDialect.HIVE);
        tEnv.useDatabase(defaultDatabase);
        // Expose the incoming stream as a temporary view for the INSERT below.
        tEnv.createTemporaryView("users", dataSource);
        // If the table already exists in Hive, the DDL below can stay commented out.
        /*String hiveSql = "CREATE external TABLE fs_table (\n" +
                "  user_id STRING, order_amount DOUBLE" +
                ") partitioned by (dt string,h string,m string) " +
                "stored as ORC " +
                "TBLPROPERTIES (\n" +
                "  'partition.time-extractor.timestamp-pattern'='$dt $h:$m:00',\n" +
                "  'sink.partition-commit.delay'='0s',\n" +
                "  'sink.partition-commit.trigger'='partition-time',\n" +
                "  'sink.partition-commit.policy.kind'='metastore'" +
                ")";
        tEnv.executeSql(hiveSql);*/
        // NOTE(review): the SELECT assumes the view has columns userId, amount and ts,
        // but dataSource is an opaque DataStream<?> — confirm the caller's schema matches.
        String insertSql = "insert into  fs_table SELECT userId, amount, " +
                " DATE_FORMAT(ts, 'yyyy-MM-dd'), DATE_FORMAT(ts, 'HH'), DATE_FORMAT(ts, 'mm') FROM users";
        tEnv.executeSql(insertSql);
    }

    /**
     * Reads a MySQL table through the (deprecated) {@link JdbcTableSource}, registers it
     * as a temporary view and prints its contents.
     *
     * @param env  the stream execution environment backing the source
     * @param tEnv the table environment to register the view on
     */
    public static void createMysqlCatalog(StreamExecutionEnvironment env, StreamTableEnvironment tEnv){
        // At the time of writing, only the Postgres dialect is fully supported by the JDBC catalog.
        // NOTE(review): credentials are hard-coded here — move them to configuration/secrets
        // before this leaves a demo context.
        JdbcTableSource source = JdbcTableSource.builder().setOptions(JdbcOptions.builder()
                .setDBUrl("jdbc:mysql://localhost:3306/sign_test_s1").setDriverName("com.mysql.cj.jdbc.Driver")
                .setUsername("root").setPassword("Root123.").setTableName("user").build()).build();
        tEnv.createTemporaryView("user", source.getDataStream(env));
        // FIX: USER is a reserved keyword in Flink/Calcite SQL, so the unquoted
        // "select * from user" fails to parse — the identifier must be backtick-quoted.
        tEnv.executeSql("select * from `user`").print();
    }

    /**
     * Entry point: sets up the streaming environment and runs the MySQL demo pipeline
     * (the Hive pipeline is left commented out).
     */
    public static void main(String[] args) {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(3);
        // Checkpoint every 10 seconds.
        env.enableCheckpointing(10000);
        env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);
        EnvironmentSettings environmentSettings = EnvironmentSettings.newInstance().useBlinkPlanner()
                .inStreamingMode().build();
        StreamTableEnvironment tEnv = StreamTableEnvironment.create(env, environmentSettings);
        DataStream<Tuple2<String, String>> dataSource = env.fromElements(Tuple2.of("coxx_jie", "coxx@vip.qq.com"));
        // Create the Hive table pipeline.
        //Job.createHiveCatalog(tEnv, dataSource);
        // Create the MySQL table pipeline.
        Job.createMysqlCatalog(env, tEnv);
    }
}
