package com.atguigu.gmall.realtime.test;

import com.atguigu.gmall.realtime.app.BaseSQLApp;
import com.atguigu.gmall.realtime.util.SQLUtil;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

import java.time.Duration;

import static org.apache.flink.table.api.Expressions.$;

/**
 * @Author lzc
 * @Date 2022/5/8 21:48
 */
/**
 * Demo job: three-way LEFT JOIN of socket streams via Flink SQL, writing the
 * joined rows to an upsert-kafka sink.
 *
 * <p>Each socket stream carries comma-separated {@code id,value} lines.
 * Because left joins emit retract/update rows when a right-side record
 * arrives after the left side, the sink is upsert-kafka keyed by {@code id}
 * so downstream consumers see the latest row per key.
 */
public class LeftJoin extends BaseSQLApp {
    public static void main(String[] args) {
        // Args per BaseSQLApp.init contract: job name, port, parallelism, checkpoint id.
        new LeftJoin().init("LeftJoin", 10000, 1, "LeftJoin");
    }

    /**
     * Reads {@code key,value} lines from a socket on hadoop162 at the given
     * port and maps each line to a (key, value) tuple.
     *
     * <p>Assumes every line contains at least one comma; a malformed line
     * fails the task with {@code ArrayIndexOutOfBoundsException} (acceptable
     * for this manually-driven test job).
     *
     * @param env  the stream execution environment to attach the source to
     * @param port socket port on host hadoop162
     * @return a stream of (field0, field1) string tuples
     */
    private SingleOutputStreamOperator<Tuple2<String, String>> socketTupleStream(
            StreamExecutionEnvironment env, int port) {
        return env
            .socketTextStream("hadoop162", port)
            .map(line -> {
                String[] data = line.split(",");
                return Tuple2.of(data[0], data[1]);
            })
            // Lambda type erasure: declare the concrete tuple element types for Flink.
            .returns(Types.TUPLE(Types.STRING, Types.STRING));
    }

    @Override
    public void handle(StreamExecutionEnvironment env,
                       StreamTableEnvironment tEnv) {
        // Expire idle join state after 10 s so keyed state does not grow unboundedly;
        // late updates for an id older than that join against empty state.
        tEnv.getConfig().setIdleStateRetention(Duration.ofSeconds(10));

        // Three identical "id,value" sources, differing only by port.
        SingleOutputStreamOperator<Tuple2<String, String>> one   = socketTupleStream(env, 6666);
        SingleOutputStreamOperator<Tuple2<String, String>> two   = socketTupleStream(env, 7777);
        SingleOutputStreamOperator<Tuple2<String, String>> three = socketTupleStream(env, 8888);

        // Register each stream as a table with named columns.
        tEnv.createTemporaryView("t1", tEnv.fromDataStream(one, $("id"), $("name")));
        tEnv.createTemporaryView("t2", tEnv.fromDataStream(two, $("id"), $("age")));
        tEnv.createTemporaryView("t3", tEnv.fromDataStream(three, $("id"), $("sex")));

        // Chained left joins: t1 rows always appear; t2/t3 columns are NULL until matched.
        Table result = tEnv.sqlQuery("select " +
                                        "t1.id id, " +
                                        "t1.name, " +
                                        "t2.age, " +
                                        "t3.sex  " +
                                        "from t1 " +
                                        "left join t2 on t1.id=t2.id " +
                                        "left join t3 on t1.id=t3.id ");

        // Upsert sink: the (not enforced) primary key lets the connector turn the
        // join's retract/update stream into per-id upserts on the "test" topic.
        tEnv.executeSql("create table a(" +
                            " id string, " +
                            " name string," +
                            " age string, " +
                            " sex string," +
                            " primary key(id) not enforced" +
                            ")" + SQLUtil.getUpsertKafkaDDL("test"));

        result.executeInsert("a");
    }
}
