package com.atguigu.flink.chapter10.join;

import org.apache.flink.runtime.state.storage.JobManagerCheckpointStorage;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

import java.time.Duration;

/**
 * Demo of a Flink SQL lookup join: a Kafka-backed stream table ({@code t1})
 * is joined against a MySQL dimension table ({@code base_dic}) using
 * {@code FOR SYSTEM_TIME AS OF} on the stream's processing-time attribute,
 * with a PARTIAL lookup cache on the JDBC side.
 *
 * @Author lzc
 * @Date 2023/6/29 13:56
 */
public class LookupJoinDemo {
    public static void main(String[] args) {
      
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        // Store checkpoints in JobManager memory — convenient for a local demo,
        // not suitable for production.
        env.getCheckpointConfig().setCheckpointStorage(new JobManagerCheckpointStorage());
    
        StreamTableEnvironment tEnv = StreamTableEnvironment.create(env);
        // Evict state that has been idle for 20s. A lookup join itself keeps no
        // join state (it queries the dimension table per record), so this mainly
        // acts as a safety net for other stateful operators.
        tEnv.getConfig().setIdleStateRetention(Duration.ofSeconds(20));
//        tEnv.getConfig().set("table.exec.state.ttl", "20 second");
        
        // Probe-side (stream) table: Kafka topic 't1', CSV-encoded, read from
        // the latest offset. 'pt' is a computed processing-time attribute,
        // required by the FOR SYSTEM_TIME AS OF clause below.
        tEnv.executeSql("create table t1(" +
                            " id string," +
                            " pt as proctime() " +  // processing-time attribute for the lookup join
                            ")with(" +
                            "  'connector' = 'kafka', " +
                            "  'topic' = 't1', " +
                            "  'properties.bootstrap.servers' = 'hadoop162:9092', " +
                            "  'properties.group.id' = 'atguigu', " +
                            "  'scan.startup.mode' = 'latest-offset', " +
                            "  'format' = 'csv' " +
                            ")");
    
        // Dimension (lookup) table backed by MySQL via JDBC. The PARTIAL lookup
        // cache keeps at most 20 rows, each expiring 20s after its last access,
        // so repeated keys avoid a round trip to the database.
        // NOTE(review): credentials are hardcoded here for the demo — move them
        // to external configuration before any real deployment.
        tEnv.executeSql("create table base_dic(" +
                            " dic_code string, " +
                            " dic_name string " +
                            ")with(" +
                            " 'connector' = 'jdbc'," +
                            " 'url' = 'jdbc:mysql://hadoop162:3306/gmall2023?useSSL=false', " +
                            " 'table-name' = 'base_dic', " +
                            " 'username' = 'root', " +
                            " 'lookup.cache' = 'PARTIAL', " +
                            " 'lookup.partial-cache.max-rows' = '20', " +
                            " 'lookup.partial-cache.expire-after-access' = '20 second', " +
                            " 'password' = 'aaaaaa' " +
                            ")");
    
        // Lookup join: for each stream record, base_dic is queried as of the
        // record's processing time (t1.pt), matching on id = dic_code.
        // .execute().print() submits the job and blocks, streaming results to stdout.
        tEnv.sqlQuery("select " +
                          " id," +
                          " dic_name " +
                          "from t1 " +
                          "join base_dic for system_time as of t1.pt as dic " +
                          "on t1.id=dic.dic_code ")
            .execute()
            .print();
        
        
    
     
        
    }
}
/*
By default, all data is kept in state (in memory) forever.
In production, always configure a TTL for state data.
 */