package com.atguigu.gmall.realtime.joindemo;

import com.atguigu.gmall.realtime.app.BaseSQLApp;
import com.atguigu.gmall.realtime.util.SQLUtil;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * @Author lzc
 * @Date 2022/12/6 10:12
 */
public class SQLConsumeLeftJjoinDemo extends BaseSQLApp {
    public static void main(String[] args) {
        // Web UI port 8888, parallelism 2, checkpoint/group id "SQLConsumeLeftJjoinDemo".
        new SQLConsumeLeftJjoinDemo().init(8888, 2, "SQLConsumeLeftJjoinDemo");
    }

    @Override
    protected void handle(StreamExecutionEnvironment env,
                          StreamTableEnvironment tEnv) {
        // Plain (non-upsert) Kafka consumption of an upsert-style result:
        // null records in the topic are skipped automatically by the json format.
        String sourceDdl = "create table ta("
                           + "   id int, "
                           + "   name string, "
                           + "   age int "
                           + ")"
                           + SQLUtil.getKafkaSourceDDL("ta", "abc", "json");
        tEnv.executeSql(sourceDdl);

        // Print every row read from table ta to stdout.
        tEnv.sqlQuery("select * from ta").execute().print();
    }
}
/*
| +I |          17 |             100              lisi |      (NULL) |   这行数据的生成时间
| +I |          17 |             100              lisi |         170 |   这行数据的生成时间

后面 dws 层进行聚合的时候: 需要去重, 保留最后一个
| +I |          17 |             100                  lisi |      (NULL) |
| +I |          17 |             0                    lisi |         170 |




 */