package com.atguigu.gmall.realtime.join;

import com.atguigu.gmall.realtime.app.BaseSQLApp;
import com.atguigu.gmall.realtime.util.SQLUtil;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * @Author lzc
 * @Date 2023/8/2 09:08
 */
public class LookupJoin extends BaseSQLApp {
    public static void main(String[] args) {
        new LookupJoin().start(
            8888,
            2,
            "LookupJoin"
        );
    }
    
    /**
     * Demonstrates a lookup (temporal) join: a Kafka-backed fact table {@code t1}
     * probes the HBase dimension table {@code base_dic} at processing time,
     * with a partial lookup cache in front of HBase.
     */
    @Override
    public void handle(StreamExecutionEnvironment env,
                       StreamTableEnvironment tEnv) {
        // Kafka source table. "pt as proctime()" declares a processing-time
        // attribute, which is required by "FOR SYSTEM_TIME AS OF" below.
        tEnv.executeSql("create table t1(" +
                            " id string, " +
                            " pt as proctime() " +
                            ")" + SQLUtil.getKafkaSourceSQL("LookupJoin", "t20", "csv"));
        
        // HBase dimension table used as the lookup (build) side of the join.
        tEnv.executeSql("create table base_dic (" +
                            " dic_code string," + // an atomic-typed column maps to the HBase row key; its name is arbitrary
                            " info row<dic_name string> " + // each column family is declared as ROW; nested fields are the family's qualifiers, read as info.dic_name
                            ") with (" +
                            " 'connector' = 'hbase-2.2'," +
                            " 'table-name' = 'gmall:dim_base_dic'," +
                            " 'lookup.cache' = 'PARTIAL'," + // cache only rows that have actually been looked up
                            " 'lookup.partial-cache.expire-after-write' = '20 second'," + // TTL counted from the moment a row is written to the cache
                            // " 'lookup.partial-cache.expire-after-access' = '20 second'," + // alternative policy: TTL is refreshed on every access
                            " 'lookup.partial-cache.max-rows' = '10'," + // maximum number of rows kept in the cache
                            // every quorum member carries its client port explicitly
                            // (the original listed the port on the last host only, which relies on ZK default-port fallback)
                            " 'zookeeper.quorum' = 'hadoop162:2181,hadoop163:2181,hadoop164:2181'" +
                            ")");
        
        // Lookup join: each arriving t1 row queries base_dic as of its processing time.
        // LEFT JOIN keeps fact rows whose dictionary code has no match (dic_name is null).
        tEnv.sqlQuery("select " +
                          " id, " +
                          " dic.info.dic_name " +
                          "from t1 " +
                          "left join base_dic for system_time as of t1.pt as dic " +
                          "on t1.id=dic.dic_code")
            .execute()
            .print();
    }
}
