package com.atguigu.app;

import com.atguigu.bean.Bean1;
import com.atguigu.bean.Bean2;
import com.atguigu.utils.KafkaUtil;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
/*
Example adapted from another author's demo.
 */
import java.time.Duration;

/**
 * Demonstrates Flink SQL regular joins between two socket-backed dynamic tables,
 * and how the idle-state retention (state TTL) mode differs per join type and per
 * join side (OnCreateAndWrite vs. OnReadAndWrite). The left-join result is written
 * to an upsert-Kafka sink table.
 */
public class FlinkSQLJoin1 {
    public static void main(String[] args) {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // Default retention is 0 (join state is kept forever). Print it, then shorten
        // it so expired join state is cleaned up quickly for this demo.
        System.out.println(tableEnv.getConfig().getIdleStateRetention());
        tableEnv.getConfig().setIdleStateRetention(Duration.ofSeconds(10));

        // Source 1: comma-separated lines from a socket, parsed into Bean1.
        // NOTE(review): assumes input shape "id,name,ts" — malformed lines will fail the task.
        SingleOutputStreamOperator<Bean1> bean1DS = env.socketTextStream("hadoop102", 8888)
                .map(line -> {
                    String[] fields = line.split(",");
                    return new Bean1(fields[0], fields[1], Long.parseLong(fields[2]));
                });

        // Source 2: comma-separated lines from a second socket, parsed into Bean2.
        SingleOutputStreamOperator<Bean2> bean2DS = env.socketTextStream("hadoop102", 9999)
                .map(line -> {
                    String[] fields = line.split(",");
                    return new Bean2(fields[0], fields[1], Long.parseLong(fields[2]));
                });

        // Register both streams as dynamic tables for SQL queries.
        tableEnv.createTemporaryView("t1", bean1DS);
        tableEnv.createTemporaryView("t2", bean2DS);

        // Inner join — state TTL mode: left OnCreateAndWrite, right OnCreateAndWrite
//        tableEnv.sqlQuery("select t1.id,t1.name,t2.sex from t1 join t2 on t1.id=t2.id")
//                .execute()
//                .print();

        // Left outer join — state TTL mode: left OnReadAndWrite, right OnCreateAndWrite
        Table table = tableEnv.sqlQuery("select t1.id,t1.name,t2.sex from t1 left join t2 on t1.id=t2.id");

        // Define the upsert-Kafka sink table; the PRIMARY KEY lets the changelog
        // produced by the outer join be written as upserts keyed by id.
        tableEnv.executeSql("" +
                "create table kafka_test( " +
                " id string," +
                " name string," +
                " sex string," +
                " PRIMARY KEY (id) NOT ENFORCED)" + KafkaUtil.getKafkaUpsertSinkDDL("test"));

        // Write the join result into the sink. executeInsert() is the supported API;
        // the previous "insert into ... select * from " + table relied on the fragile
        // side effect of Table.toString() registering an anonymous view.
        table.executeInsert("kafka_test");

        // Right outer join — state TTL mode: left OnCreateAndWrite, right OnReadAndWrite
//        tableEnv.sqlQuery("select t1.id,t1.name,t2.sex from t1 right join t2 on t1.id=t2.id")
//                .execute()
//                .print();

        // Full outer join — state TTL mode: left OnReadAndWrite, right OnReadAndWrite
        // (the original comment mislabeled this as a right outer join)
//        tableEnv.sqlQuery("select t1.id,t1.name,t2.sex from t1 full join t2 on t1.id=t2.id")
//                .execute()
//                .print();
    }
}
