package com.atguigu.gmall.realtime.joindemo;

import com.atguigu.gmall.realtime.app.BaseSQLApp;
import com.atguigu.gmall.realtime.util.SQLUtil;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

import java.time.Duration;

/**
 * @Author lzc
 * @Date 2023/2/13 10:45
 */
public class LeftJoinDemo extends BaseSQLApp {
    public static void main(String[] args) {
        new LeftJoinDemo().init(6000, 2, "LeftJoinDemo");
    }
    
    @Override
    public void handle(StreamExecutionEnvironment env, StreamTableEnvironment tEnv) {
        // Join state TTL:
        //   inner join: rows from either side are kept in state for only 20s,
        //               then automatically cleared.
        //   left join:  the right side is likewise kept for only 20s; a left-side
        //               row is removed only once it has been idle for 20s.
        tEnv.getConfig().setIdleStateRetention(Duration.ofSeconds(20));
        
        // Left input: Kafka topic "t1", csv format.
        tEnv.executeSql(
            "create table t1(id string, name string)"
                + SQLUtil.getDDLKafkaSource("t1", "atguigu", "csv"));
        
        // Right input: Kafka topic "t2", csv format.
        tEnv.executeSql(
            "create table t2(id string, age int)"
                + SQLUtil.getDDLKafkaSource("t2", "atguigu", "csv"));
        
        // Left join behavior: if the left row arrives first, the join emits
        // (left, null); when the first matching right row arrives, that result
        // is retracted and replaced (delete then insert). Every subsequent
        // right-side match is a plain insert.
        String leftJoinSql =
            "select t1.id, name, age from t1 left join t2 on t1.id=t2.id";
        tEnv.sqlQuery(leftJoinSql)
            .execute()
            .print();
        
    }
}
/*
DataStream joins:
    window join
    
    interval join

Flink SQL

Regular (unbounded) join:
1. By default, rows from both the left and right tables are kept in memory state forever.
2. To prevent the state from causing an OOM, always configure a state TTL.

 
 */