package org.atguigu.gmall.realtime.join;

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.atguigu.gmall.realtime.app.BaseSQLApp;
import org.atguigu.gmall.realtime.util.SQLUtil;

/**
 * @Author zenghaifeng
 * @Date 2023/4/25 0:30
 * @Package: org.atguigu.gmall.realtime.join
 * @Class: LookUpJoin
 * @Description: Demonstrates a Flink SQL lookup join: a Kafka-backed stream table is
 *               enriched against a MySQL (JDBC) dimension table using
 *               FOR SYSTEM_TIME AS OF processing time, with a partial lookup cache.
 * @Version 1.0
 */
public class LookUpJoin extends BaseSQLApp {

    // Job configuration — named constants instead of magic numbers in main().
    private static final int REST_PORT = 50005;      // local web UI / REST port passed to init()
    private static final int PARALLELISM = 2;        // default job parallelism
    private static final String JOB_NAME = "LookUpJoin";

    public static void main(String[] args) {
        new LookUpJoin().init(REST_PORT, PARALLELISM, JOB_NAME);
    }

    /**
     * Registers a Kafka source table and a JDBC dimension table, then runs a
     * processing-time lookup join ({@code FOR SYSTEM_TIME AS OF test.pt}) that
     * enriches each stream row with {@code dic_name} from MySQL and prints the result.
     *
     * @param env the stream execution environment (unused directly; the job is driven via SQL)
     * @param tbe the table environment used to register tables and execute queries
     */
    @Override
    protected void handle(StreamExecutionEnvironment env, StreamTableEnvironment tbe) {
        // Kafka source table. `pt` is a processing-time attribute — required on the
        // probe side of a lookup join. NOTE(review): the Kafka consumer group id is
        // "LookupJoin" (lowercase 'u'), intentionally left as-is to keep the same group.
        tbe.executeSql("create table test(" +
                "id string, " +
                "pt as proctime() " +
                ")" + SQLUtil.getKafkaSourceDDL("test", "LookupJoin", "csv"));

        // JDBC dimension table with a PARTIAL lookup cache: at most 200 cached rows,
        // each evicted 20 seconds after last access — bounds MySQL round-trips.
        // SECURITY NOTE(review): credentials are hard-coded in source; they should be
        // externalized (config file / env vars / secret store) before production use.
        tbe.executeSql("create table base_dic(" +
                " dic_code string, " +
                " dic_name string " +
                ")with(" +
                " 'connector' = 'jdbc'," +
                " 'url' = 'jdbc:mysql://hadoop162:3306/gmall2023?useSSL=false'," +
                " 'table-name' = 'base_dic', " +
                " 'lookup.cache' = 'PARTIAL'," +
                " 'lookup.partial-cache.max-rows' = '200'," +
                " 'lookup.partial-cache.expire-after-access' = '20 second', " +
                " 'username' = 'root', " +
                " 'password' = 'aaaaaa' " +
                ")");

        // Lookup join: for every row arriving on `test`, query (or hit the cache of)
        // base_dic at the row's processing time and attach the matching dic_name.
        Table result = tbe.sqlQuery("select " +
                " test.id, " +
                " dic.dic_name " +
                "from test " +
                "join base_dic for system_time as of test.pt as dic " +
                "on test.id=dic.dic_code");
        result.execute().print();
    }
}
