package com.atguigu.gmall.realtime.joindemo;

import com.atguigu.gmall.realtime.app.BaseSQLApp;
import com.atguigu.gmall.realtime.util.SQLUtil;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

import java.time.Duration;

/**
 * @Author lzc
 * @Date 2022/12/30 10:15
 */
public class RegularJoin_Inner_Kafka extends BaseSQLApp {

    /** How long each join input row is retained in Flink state before eviction. */
    private static final Duration JOIN_STATE_TTL = Duration.ofSeconds(30);

    public static void main(String[] args) {
        // Bootstrap via BaseSQLApp: web-UI/rest port 5001, parallelism 2,
        // "RegularJoin_1" as the job identifier.
        new RegularJoin_Inner_Kafka().init(5001, 2, "RegularJoin_1");
    }

    @Override
    public void handle(StreamExecutionEnvironment env,
                       StreamTableEnvironment tEnv) {
        // Every row of each input table is kept in join state for 30 s, then
        // purged automatically. (Equivalent to setting the config option
        // "table.exec.state.ttl" to "30 s".)
        tEnv.getConfig().setIdleStateRetention(JOIN_STATE_TTL);

        // Left input table backed by Kafka.
        // NOTE(review): getKafkaSourceDDL args look like (topic, groupId,
        // format) — confirm against SQLUtil.
        String t1Ddl = "create table t1(" +
                           " id int, " +
                           " name string " +
                           ")" + SQLUtil.getKafkaSourceDDL("t1", "atguigu", "csv");
        tEnv.executeSql(t1Ddl);

        // Right input table backed by Kafka, same group/format as t1.
        String t2Ddl = "create table t2(" +
                           " id int, " +
                           " age int " +
                           ")" + SQLUtil.getKafkaSourceDDL("t2", "atguigu", "csv");
        tEnv.executeSql(t2Ddl);

        // Regular inner join (comma-list + WHERE form): a row is emitted only
        // when an id appears on both sides.
        Table joined = tEnv.sqlQuery("select " +
                                         " t1.id," +
                                         " name, " +
                                         " age " +
                                         "from t1 " +
                                         ", t2 where t1.id=t2.id");

        // Sink table mapped onto the Kafka topic "ta".
        String sinkDdl = "create table ta(" +
                             " id int, " +
                             " name string, " +
                             " age int " +
                             ")" + SQLUtil.getKafkaSinkDDL("ta", "csv");
        tEnv.executeSql(sinkDdl);

        // Stream the join result into the sink topic.
        joined.executeInsert("ta");
    }
}
/*


 */