package com.mlamp.me;

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.SqlDialect;
import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.api.TableResult;
import org.apache.flink.table.catalog.hive.HiveCatalog;

/**
 * Flink Table API demo: registers a Hive catalog, creates a partitioned Hive
 * table ({@code user_log}, stored as Parquet), and inserts two rows into the
 * static partition {@code ds = 'first'} using the Hive SQL dialect.
 */
public class HiveDemoCreateTwoWithPARTITION {
    public static void main(String[] args){
        // Batch/streaming-unified table environment backed by the Blink planner.
        EnvironmentSettings settings = EnvironmentSettings.newInstance().useBlinkPlanner().build();
        TableEnvironment tableEnv = TableEnvironment.create(settings);

        // Hive catalog connection parameters.
        String name            = "myhive";
        String defaultDatabase = "hive_tmp";
        String hiveConfDir     = "./src/main/resources"; // directory containing hive-site.xml
        String version         = "2.3.4";                // Hive version of the target metastore

        HiveCatalog hive = new HiveCatalog(name, defaultDatabase, hiveConfDir, version);
        // Register the catalog under the name "hive_tmp" (tables resolve as hive_tmp.<table>).
        tableEnv.registerCatalog("hive_tmp", hive);
        // Make the registered catalog current so unqualified table names resolve against it.
        tableEnv.useCatalog("hive_tmp");

        // Switch to the Hive dialect so Hive-specific DDL (PARTITIONED BY / STORED AS) parses.
        tableEnv.getConfig().setSqlDialect(SqlDialect.HIVE);

        // Create the partitioned Hive table. "IF NOT EXISTS" cannot be used here
        // (see original note), so the statement fails when the table already exists
        // from a previous run; catch and report that case instead of aborting the demo.
        try {
            String createSql = "CREATE TABLE  user_log (\n" +
                    "  user_id STRING\n" +
                    "  ,item_id STRING\n" +
                    "  ,category_id STRING\n" +
                    "  ,behavior STRING\n" +
                    ") PARTITIONED BY (ds STRING) \n" +
                    "STORED AS parquet";
            TableResult result = tableEnv.executeSql(createSql);

            // DDL results carry a single status row ("OK"); print it for visibility.
            result.collect().forEachRemaining(ele -> System.out.println("out==>> " + ele.toString()));
        } catch (Exception e) {
            // Most likely cause: the table already exists. Print and continue so the
            // insert below still runs against the existing table.
            e.printStackTrace();
        }

        // Insert two rows into the static partition ds = 'first'.
        // NOTE(review): executeSql submits the insert job asynchronously; in a
        // short-lived main the JVM may exit before the job finishes — consider
        // TableResult.await() if the Flink version in use supports it.
        String insertSql = "insert into user_log PARTITION(ds = 'first')  " +
                "values"
                +"('组一','北京','bj','beijing'),"
                +"('组二','深圳','sz','shenzhen')"
                ;
        tableEnv.executeSql(insertSql);
    }
}