package com.wudl.flink.sql;

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.TableResult;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.table.catalog.hive.HiveCatalog;

/**
 * @ClassName : Flink_Sql_HiveCatalog
 * @Description : Flink 操作HiveCatalog
 * @Author :wudl
 * @Date: 2021-08-20 21:50
 */

/**
 * Demo: registering a Hive catalog with Flink's Table API and querying a Hive table.
 *
 * <p>Steps: build a streaming environment, wrap it in a table environment, register a
 * {@link HiveCatalog} under the name "myHive", switch to that catalog, and print the
 * contents of {@code db_wudl.dept}.
 */
public class Flink_Sql_HiveCatalog {

    public static void main(String[] args) throws Exception {
        // Streaming environment; parallelism 1 keeps console output ordered for the demo.
        StreamExecutionEnvironment streamEnv = StreamExecutionEnvironment.getExecutionEnvironment();
        streamEnv.setParallelism(1);

        StreamTableEnvironment tableEnvironment = StreamTableEnvironment.create(streamEnv);

        // Build the Hive catalog: catalog name, default database, and the directory
        // containing hive-site.xml (here a relative path to the project's input folder).
        HiveCatalog catalog = new HiveCatalog("myHive", "db_wudl", "wudl-flink-12/input");

        // Make the catalog visible to SQL, then select it as the current catalog so
        // unqualified table names resolve against Hive.
        tableEnvironment.registerCatalog("myHive", catalog);
        tableEnvironment.useCatalog("myHive");

        // Query an existing Hive table and print the result set to stdout.
        tableEnvironment.executeSql("select * from db_wudl.dept").print();

        System.out.println("---------------------------------------");
    }
}
