package com.atguigu.gmall.realtime.joindemo;

import com.atguigu.gmall.realtime.app.BaseSQLApp;
import com.atguigu.gmall.realtime.util.SQLUtil;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * @Author lzc
 * @Date 2023/2/13 15:32
 */
public class LookupJoinDemo extends BaseSQLApp {
    public static void main(String[] args) {
        // init(port, parallelism, jobName) — inherited from BaseSQLApp.
        new LookupJoinDemo().init(6000, 2, "LookupJoinDemo");
    }
    
    /**
     * Demonstrates a Flink SQL lookup join: a Kafka-backed fact stream
     * ({@code t_look}) is enriched row-by-row against a MySQL dimension
     * table ({@code base_dic}) via the JDBC connector.
     */
    @Override
    public void handle(StreamExecutionEnvironment env, StreamTableEnvironment tEnv) {
        createKafkaSourceTable(tEnv);
        createJdbcDimTable(tEnv);
        runLookupJoin(tEnv);
    }
    
    // Fact stream: csv records from Kafka topic "a", plus a processing-time
    // attribute (pt) — required as the FOR SYSTEM_TIME AS OF anchor.
    private void createKafkaSourceTable(StreamTableEnvironment tEnv) {
        String sourceDdl =
            "create table t_look("
                + "  id string, "
                + "  pt as proctime() "
                + ")"
                + SQLUtil.getDDLKafkaSource("t_look", "a", "csv");
        tEnv.executeSql(sourceDdl);
    }
    
    // Dimension table over MySQL. The lookup.cache.* options enable a small
    // per-task cache (max 10 rows, 30 s TTL) so not every probe hits MySQL.
    private void createJdbcDimTable(StreamTableEnvironment tEnv) {
        String dimDdl =
            "CREATE TABLE base_dic ("
                + "  dic_code string,"
                + "  dic_name STRING "
                + ") WITH ("
                + "  'connector' = 'jdbc',"
                + "  'url' = 'jdbc:mysql://hadoop162:3306/gmall2022?useSSL=false',"
                + "  'table-name' = 'base_dic', "
                + "  'username' = 'root', "
                + "  'lookup.cache.ttl' = '30 second', "
                + "  'lookup.cache.max-rows' = '10', "
                + "  'password' = 'aaaaaa' "
                + ")";
        tEnv.executeSql(dimDdl);
    }
    
    // Lookup join: each incoming t_look row probes base_dic at its own
    // processing time; results are printed to stdout for the demo.
    private void runLookupJoin(StreamTableEnvironment tEnv) {
        String joinSql =
            "select "
                + " t.id, "
                + " dic_name "
                + "from t_look as t "
                + "join base_dic for system_time as of t.pt as dic "
                + "on t.id=dic.dic_code";
        tEnv.sqlQuery(joinSql).execute().print();
    }
}
/*
Lookup join notes:
 By default a lookup join caches nothing, so every probe queries the dimension
 store directly — results are always fresh/accurate, but throughput is low.
 This demo overrides the default with 'lookup.cache.ttl' = '30 second' and
 'lookup.cache.max-rows' = '10', trading a little freshness for fewer MySQL hits.
 */