package com.bawei.flink.tableapi;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.catalog.hive.HiveCatalog;

/**
 * Demo of a Flink SQL regular (unbounded) inner join between a table read from a
 * Hive catalog ({@code t_stu}) and a streaming table backed by a Kafka topic
 * ({@code t_scores}).
 *
 * <p>Flow: register a {@link HiveCatalog}, expose the Hive table as a temporary
 * view, declare the Kafka source via DDL, then run the join and print results
 * to stdout. Intended as a runnable example, not production code.
 */
public class RegularJoinDemo {

    public static void main(String[] args) {
        // Blink planner in streaming mode — required for unbounded regular joins.
        EnvironmentSettings settings = EnvironmentSettings
                .newInstance()
                .useBlinkPlanner()
                .inStreamingMode()
                .build();

        TableEnvironment tEnv = TableEnvironment.create(settings);

        String name            = "myhive";
        String defaultDatabase = "default";
        // NOTE(review): this mixes a file:/// URI scheme with Windows backslashes;
        // HiveCatalog normally expects a plain local directory path (or a clean URI).
        // Verify this resolves on the target machine before relying on it.
        String hiveConfDir     = "file:///C:\\Users\\wangyadi\\Desktop\\conf";

        HiveCatalog hive = new HiveCatalog(name, defaultDatabase, hiveConfDir);
        // Reuse the `name` variable instead of repeating the "myhive" literal,
        // so the catalog name cannot drift between registration and use.
        tEnv.registerCatalog(name, hive);

        // Set the HiveCatalog as the current catalog of the session.
        tEnv.useCatalog(name);

        // Expose the Hive table as a temporary view so it can be joined below.
        Table stu = tEnv.sqlQuery("select id,name,sex,age from t_stu");
        tEnv.createTemporaryView("my_t_stu", stu);

        // Kafka-backed streaming source; CSV rows of (user_id, score).
        // latest-offset: only consume records produced after the job starts.
        tEnv.executeSql("CREATE TABLE t_scores(\n" +
                "  user_id STRING,\n" +
                "  score INT\n" +
                ") WITH (\n" +
                " 'connector' = 'kafka',\n" +
                " 'topic' = 'scores',\n" +
                " 'properties.bootstrap.servers' = 'cdh1:9092,cdh2:9092,cdh3:9092',\n" +
                " 'properties.group.id' = 'testGroup',\n" +
                " 'scan.startup.mode' = 'latest-offset',\n" +
                " 'format' = 'csv',\n" +
                " 'csv.ignore-parse-errors' = 'true',\n" +
                " 'csv.allow-comments' = 'true'\n" +
                ")");

        // Regular inner join: every Kafka score row is matched against the Hive
        // student view on user_id = id. Columns are qualified for readability.
        tEnv.sqlQuery("select my_t_stu.id, my_t_stu.name, my_t_stu.sex, my_t_stu.age, t_scores.score "
                        + "from t_scores inner join my_t_stu on t_scores.user_id = my_t_stu.id")
                .execute()
                .print();
    }
}
