import com.atguigu.bigdata.gmall.realtime.app.BaseSQLApp;
import com.atguigu.bigdata.gmall.realtime.util.SQLUtil;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * @Author lzc
 * @Date 2022/10/11 11:36
 */
/**
 * Demo of a Flink SQL lookup (temporal) join: a Kafka-backed fact table
 * ("person") is joined against a MySQL dimension table ("base_dic") using
 * {@code FOR SYSTEM_TIME AS OF} on a processing-time attribute.
 */
public class LookupJoin extends BaseSQLApp {
    public static void main(String[] args) {
        // NOTE(review): assumed to be (port, parallelism, job identifier) —
        // confirm against BaseSQLApp#init.
        new LookupJoin().init(10000, 2, "abc");
    }
    
    @Override
    protected void handle(StreamExecutionEnvironment env,
                          StreamTableEnvironment tEnv) {
        
        // Fact table: Kafka source with a processing-time attribute (pt),
        // which the temporal join below anchors on.
        String personDdl =
            "create table person("
                + " id string, "
                + " pt as proctime()"
                + ")"
                + SQLUtil.getKafkaSource("per", "atguigu");
        tEnv.executeSql(personDdl);
        
        // Dimension table: MySQL via the JDBC connector. The lookup cache
        // keeps up to 10 rows for 1 minute to reduce database round trips.
        String baseDicDdl =
            "CREATE  TABLE base_dic ( "
                + "  dic_code string, "
                + "  dic_name string "
                + ") WITH ( "
                + "  'connector' = 'jdbc', "
                + "  'url' = 'jdbc:mysql://hadoop162:3306/gmall2022?useSSL=false', "
                + "  'table-name' = 'base_dic', "
                + "  'username' = 'root', "
                + "  'password' = 'aaaaaa',  "
                + "'lookup.cache.max-rows' = '10',"
                + "'lookup.cache.ttl' = '1 minute' "
                + ")";
        tEnv.executeSql(baseDicDdl);
        
        // Lookup join: each person row probes base_dic as of its processing
        // time; results are printed to stdout (demo sink).
        String lookupJoinSql =
            "select "
                + "person.id, "
                + "dic.dic_name "
                + "from person "
                + "join base_dic for system_time as of person.pt as dic "
                + "on person.id=dic.dic_code";
        tEnv.sqlQuery(lookupJoinSql).execute().print();
        
    }
}
/*
Lookup join:
    By default, every probe queries the database directly (unless a lookup cache is configured).

    Typically used to join a fact table with a dimension table, since
    dimension tables change relatively slowly.
 */