package cn.doitedu.sql;

import org.apache.flink.connector.jdbc.catalog.JdbcCatalog;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * Demo: registering a {@link JdbcCatalog} in a Flink table environment and
 * querying MySQL tables through it directly.
 *
 * <p>Note: the JDBC catalog is read-only with respect to DDL — attempting to
 * CREATE a Flink table inside it fails (see the commented-out example below).
 * Plain SELECTs against the catalog's tables work; each such query is a
 * one-shot (bounded) read performed through the JDBC connector.
 */
public class _22_JdbcCatalog_Demo {

    public static void main(String[] args) {

        StreamExecutionEnvironment streamEnv =
                StreamExecutionEnvironment.getExecutionEnvironment();
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(streamEnv);

        // Build a JDBC catalog instance pointing at the MySQL server.
        // Args: classloader, catalog name, default database, user, password, base URL.
        JdbcCatalog jdbcCatalog = new JdbcCatalog(
                _22_JdbcCatalog_Demo.class.getClassLoader(),
                "jdbc",
                "doit46",
                "root",
                "root",
                "jdbc:mysql://doitedu:3306/");

        // Register the catalog with the table environment under a name of our choosing.
        tableEnv.registerCatalog("doitedu_mysql", jdbcCatalog);

        // Make it the current catalog; equivalent to executing
        // the SQL statement: "use catalog doitedu_mysql".
        tableEnv.useCatalog("doitedu_mysql");
        tableEnv.executeSql("show databases").print();

        // Creating a Flink-managed table inside a JDBC catalog is NOT supported
        // and throws immediately — kept here (disabled) as a demonstration:
//        tableEnv.executeSql(
//                " create table doit46.score_kfk_1(                                 "+
//                        "       user_id int,                                    "+
//                        "       course string,                                  "+
//                        " 	    score double                                     "+
//                        "  ) with (                                              "+
//                        "       'connector' = 'kafka',                           "+
//                        "       'topic' = 'score-test',                            "+
//                        "       'properties.bootstrap.servers' = 'doitedu:9092', "+
//                        "       'properties.group.id' = 'g003',                  "+
//                        "       'scan.startup.mode' = 'latest-offset',           "+
//                        "       'value.format' = 'json',                         "+
//                        "       'value.fields-include' = 'EXCEPT_KEY'            "+
//                        " )                                                      "
//        );

        // Flink CAN, however, query tables that live in the JDBC catalog
        // (i.e. the MySQL tables themselves). Under the hood this goes through
        // the JDBC connector and is a one-time (bounded) scan, not a stream.
        tableEnv.executeSql("select * from doit46.student").print();
    }

}
