package com.mlamp.me;

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.api.TableResult;
import org.apache.flink.table.catalog.hive.HiveCatalog;
import org.apache.flink.types.Row;
import org.apache.flink.util.CloseableIterator;

/**
 * Demo: registers a {@link HiveCatalog} on a Flink {@link TableEnvironment},
 * copies the rows of {@code hive_tmp.log_hive} back into the same table,
 * then reads the table and prints every row plus the insert job's status.
 */
public class HiveDemoInsertTwo {
    public static void main(String[] args) throws Exception {
        // The Blink planner is required for Hive integration in this Flink version.
        EnvironmentSettings settings = EnvironmentSettings.newInstance().useBlinkPlanner().build();
        TableEnvironment tableEnv = TableEnvironment.create(settings);

        // Use one consistent catalog name: the original constructed the catalog
        // as "myhive" but registered it under "hive_tmp".
        String catalogName     = "hive_tmp";
        String defaultDatabase = "hive_tmp";
        String hiveConfDir     = "./src/main/resources"; // must contain hive-site.xml
        String version         = "2.3.4";                // Hive client version

        HiveCatalog hive = new HiveCatalog(catalogName, defaultDatabase, hiveConfDir, version);
        // Register the catalog and make it current so that "hive_tmp.log_hive"
        // in the SQL below resolves as database.table within this catalog.
        tableEnv.registerCatalog(catalogName, hive);
        tableEnv.useCatalog(catalogName);

        // Write into the Hive table. executeSql() submits INSERT jobs
        // asynchronously; await() blocks until the job finishes so the
        // SELECT below sees the newly written rows (replaces the previous
        // misuse of collect() on the INSERT result).
        String insertSQL = "insert into hive_tmp.log_hive select * from hive_tmp.log_hive";
        TableResult insertResult = tableEnv.executeSql(insertSQL);
        insertResult.await();

        // Read the table back. collect() returns a CloseableIterator that must
        // be closed to release the connection to the job's result stream.
        String selectSQL = "select * from hive_tmp.log_hive";
        TableResult selectResult = tableEnv.executeSql(selectSQL);
        try (CloseableIterator<Row> rows = selectResult.collect()) {
            rows.forEachRemaining(row -> System.out.println("select==>> " + row));
        }

        // getJobStatus() returns a CompletableFuture; join() resolves it so the
        // actual JobStatus is printed rather than the future object's toString().
        insertResult.getJobClient()
                .ifPresent(client -> System.out.println(client.getJobStatus().join()));
    }
}