package cn.itcast.flink.hive;

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.catalog.hive.HiveCatalog;

/**
 * Flink SQL integration with Hive: first add the Hive connector dependency, then
 * create a HiveCatalog, register it with the TableEnvironment, switch to it with
 * useCatalog, and finally read data from a Hive table.
 *
 * @author lilulu
 * @date 2023-04-10 20:22
 */
public class SqlConnectorHiveSourceDemo {
    public static void main(String[] args) {
        // Batch mode: reading a bounded Hive table snapshot, not a stream.
        TableEnvironment tableEnvironment = TableEnvironment.create(
                EnvironmentSettings.newInstance().inBatchMode().useBlinkPlanner().build());

        // Catalog backed by the Hive Metastore. The conf directories must contain
        // hive-site.xml / the Hadoop *-site.xml files respectively.
        HiveCatalog hiveCatalog = new HiveCatalog(
                "hiveCatalogJava",                           // catalog name (metastore-side)
                "default",                                   // default database
                "flink-sql/src/main/resources/hive-conf",    // Hive conf dir (hive-site.xml)
                "flink-sql/src/main/resources/hadoop-conf",  // Hadoop conf dir
                "3.1.2"                                      // Hive version
        );

        // Register the catalog under the name used by the fully-qualified query below.
        tableEnvironment.registerCatalog("hive_catalog_java", hiveCatalog);

        // FIX: make the Hive catalog the current one. Without this, the unqualified
        // statements below resolve against the default in-memory catalog: "show
        // databases" would not list Hive databases, and "select * from test_sql.emp"
        // would fail with a table-not-found error.
        tableEnvironment.useCatalog("hive_catalog_java");

        tableEnvironment.executeSql("show databases").print();
        System.out.println("==================================");
        // Resolved via the current catalog (hive_catalog_java) set above.
        tableEnvironment.executeSql("select * from test_sql.emp").print();
        System.out.println("==================================");
        // Fully qualified as catalog.database.table — works regardless of the current catalog.
        tableEnvironment.executeSql("select * from hive_catalog_java.test_sql.emp").print();
    }
}
