package com.atguigu.gmall.realtime.joindemo;

import com.atguigu.gmall.realtime.app.BaseSQLApp;
import com.atguigu.gmall.realtime.util.SQLUtil;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

import java.time.Duration;

/**
 * @Author lzc
 * @Date 2022/12/6 10:12
 */
/**
 * Demo: a streaming LEFT JOIN between two Kafka-backed tables, with idle-state
 * TTL to bound join state, writing the (updating) join result to an
 * upsert-kafka sink keyed by {@code id}.
 */
public class LeftJoinToKafkaDemo extends BaseSQLApp {
    public static void main(String[] args) {
        new LeftJoinToKafkaDemo().init(
            8876,
            2,
            // Fixed: was "JoinInnerDemo", a copy-paste leftover from the inner-join
            // demo; the job name should match this class.
            "LeftJoinToKafkaDemo"
        );
    }

    @Override
    protected void handle(StreamExecutionEnvironment env,
                          StreamTableEnvironment tEnv) {
        // Source table t1: (id, name) read from Kafka topic "t1" in CSV format.
        tEnv.executeSql("create table t1(" +
                            " id int, " +
                            " name string " +
                            ")" + SQLUtil.getKafkaSourceDDL("t1", "atguigu", "csv"));

        // Source table t2: (id, age) read from Kafka topic "t2" in CSV format.
        tEnv.executeSql("create table t2(" +
                            " id int, " +
                            " age int " +
                            ")" + SQLUtil.getKafkaSourceDDL("t2", "atguigu", "csv"));

        // An unbounded regular join keeps every input row in state forever;
        // set an idle-state retention (TTL) of 20s so stale join state is evicted.
        tEnv.getConfig().setIdleStateRetention(Duration.ofSeconds(20));

        // LEFT JOIN: every t1 row is emitted even before a matching t2 row
        // arrives (age is NULL until then), producing an updating result stream.
        Table result = tEnv.sqlQuery("select " +
                                        " t1.id, " +
                                        " name, " +
                                        "  age " +
                                        "from t1 " +
                                        "left join t2 on t1.id=t2.id");

        // Sink table ta: upsert-kafka keyed by id, so the retract/update stream
        // from the left join can be written as JSON upserts.
        tEnv.executeSql("create table ta(" +
                            "   id int, " +
                            "   name string, " +
                            "   age int, " +
                            "   primary key(id) not enforced" +
                            ")" + SQLUtil.getUpsertDDL("ta", "json"));

        result.executeInsert("ta");
    }
}
/*
ttl (idle-state retention) behavior observed here:
    Inner join: joined state is cleared automatically once the retention time
    elapses, even if matching data keeps arriving for that key.

    Left join: a left-table row is cleared only after it has been idle for 20s;
    right-table rows are still cleared on the normal schedule.
 */