package com.atguigu.realtime.join;

import com.atguigu.realtime.app.BaseSqlApp;
import com.atguigu.realtime.util.SQLUtil;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * @Author lzc
 * @Date 2023/3/13 15:13
 */
public class LookUpJoin extends BaseSqlApp {
    public static void main(String[] args) {
        // Arguments are forwarded to BaseSqlApp.init (port/parallelism/job-id
        // semantics defined there — not visible in this file).
        new LookUpJoin().init(30000, 2, "LookUpJoin");
    }
    
    /**
     * Declares a Kafka-backed fact table and a MySQL-backed dimension table,
     * then runs (and prints) a processing-time lookup join between them.
     */
    @Override
    public void handle(StreamExecutionEnvironment env, StreamTableEnvironment tEnv) {
        // Kafka source table "tt". The computed column pt = proctime() provides the
        // processing-time attribute required by the temporal join below.
        // Connector options (topic, group id, csv format) come from SQLUtil.
        tEnv.executeSql("create table tt(" +
                            "id string, " +
                            "pt as proctime() " +
                            ")" + SQLUtil.getKafkaSourceDDL("tt", "LookUpJoin", "csv"));
        
        
        // JDBC dimension table backed by MySQL; acts as the lookup side of the join.
        // NOTE(review): plaintext credentials hard-coded here — move to config/secrets.
        tEnv.executeSql("create table base_dic(" +
                            " dic_code string, " +
                            " dic_name string " +
                            ")with(" +
                            " 'connector' = 'jdbc'," +
                            " 'url' = 'jdbc:mysql://hadoop162:3306/gmall2022?useSSL=false'," +
                            " 'table-name' = 'base_dic', " +
                            " 'username' = 'root', " +
                            " 'password' = 'aaaaaa' ," +
                            " 'lookup.cache.ttl' = '30 second',  " +  // how long a looked-up dimension row is kept in the lookup cache
                            " 'lookup.cache.max-rows' = '20'  " +  // cache at most 20 rows
                            ")");
    
        // Processing-time lookup join: each row from tt probes base_dic
        // (via JDBC, subject to the cache settings above) as of its processing time,
        // matching on tt.id = base_dic.dic_code. Result is printed to stdout.
        tEnv.sqlQuery("select " +
                          "id, " +
                          "dic.dic_name " +
                          "from tt " +
                          "join base_dic for system_time as of tt.pt as dic " +
                          "on tt.id=dic.dic_code")
            .execute()
            .print();
        
        
    }
}
