package com.atguigu.gmall.realtime.joindemo;

import com.atguigu.gmall.realtime.app.BaseSQLApp;
import com.atguigu.gmall.realtime.util.SQLUtil;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

import java.time.Duration;

/**
 * @Author lzc
 * @Date 2023/2/13 10:45
 */
public class LeftJoinToKafkaDemo_1 extends BaseSQLApp {
    public static void main(String[] args) {
        new LeftJoinToKafkaDemo_1().init(6000, 2, "LeftJoinToKafkaDemo_1");
    }

    /**
     * Reads two Kafka topics as tables, left-joins them on id, and writes the
     * (updating) join result to an upsert-kafka sink keyed on id.
     */
    @Override
    public void handle(StreamExecutionEnvironment env, StreamTableEnvironment tEnv) {
        // Join state is retained for at most 30 s per key; idle state older
        // than that is cleared automatically by Flink's idle-state retention.
        tEnv.getConfig().setIdleStateRetention(Duration.ofSeconds(30));

        // Left input: Kafka topic "t1", csv format, columns (id, sex, score).
        tEnv.executeSql("create table t1(" +
                            "id string," +
                            " sex string, " +
                            " score int " +
                            ")" + SQLUtil.getDDLKafkaSource("t1", "atguigu", "csv"));

        // Right input: Kafka topic "t2", csv format, columns (id, age).
        tEnv.executeSql("create table t2(" +
                            "id string," +
                            " age int)" +
                            SQLUtil.getDDLKafkaSource("t2", "atguigu", "csv"));

        // Sink: upsert-kafka table with primary key id, so the update/retract
        // rows emitted by the left join can be written out as upserts.
        tEnv.executeSql("create table t_left_1(" +
                            " id string, " +
                            " sex string, " +
                            " score int, " +
                            " age int, " +
                            " primary key (id) not enforced" +
                            ")" + SQLUtil.getDDLUpsertKafka("t_left_1", "json"));

        // Left join: every t1 row is emitted; age is null until a matching
        // t2 row arrives, at which point the row is updated.
        Table joined = tEnv.sqlQuery("select " +
                                        "t1.id id, " +
                                        "sex, " +
                                        "score, " +
                                        "age " +
                                        "from t1 " +
                                        "left join t2 on t1.id=t2.id");

        joined.executeInsert("t_left_1");
    }
}
/*
A plain Kafka connector can only accept append-only (insert) rows; update or
delete (retract) rows cannot be written to it. That is why the left-join
result above — which emits updates — is written to an upsert-kafka sink.
 */