package com.atguigu.app;

import com.atguigu.bean.Bean1;
import com.atguigu.bean.Bean2;
import com.atguigu.utils.KafkaUtil;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

import java.time.Duration;

/*
Demonstrates FlinkSQL inner, left, right, and full outer joins.
 */
public class FlinkSQLJoin {

    /**
     * Joins two socket-backed streams with FlinkSQL and writes the left-join
     * result to Kafka through an upsert-kafka sink table.
     *
     * <p>Input format on both sockets is a comma-separated line of three
     * fields, e.g. {@code 1001,Tom,1646000000}.
     *
     * @param args unused command-line arguments
     */
    public static void main(String[] args) {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // Default idle-state retention is PT0S, which means the feature is
        // disabled and join state is kept forever.
        System.out.println(tableEnv.getConfig().getIdleStateRetention());
        // Expire state idle for 10 seconds so unbounded joins don't grow state forever.
        tableEnv.getConfig().setIdleStateRetention(Duration.ofSeconds(10));

        // Parse "field0,field1,timestamp" lines from port 8888 into Bean1.
        SingleOutputStreamOperator<Bean1> bean1DS = env.socketTextStream("hadoop102", 8888)
                .map(line -> {
                    String[] fields = line.split(",");
                    return new Bean1(fields[0], fields[1], Long.parseLong(fields[2]));
                });

        // Parse "field0,field1,timestamp" lines from port 9999 into Bean2.
        SingleOutputStreamOperator<Bean2> bean2DS = env.socketTextStream("hadoop102", 9999)
                .map(line -> {
                    String[] fields = line.split(",");
                    return new Bean2(fields[0], fields[1], Long.parseLong(fields[2]));
                });

        // Register the streams directly as temporary views; no intermediate
        // Table object is needed.
        tableEnv.createTemporaryView("t1", bean1DS);
        tableEnv.createTemporaryView("t2", bean2DS);

        // Left outer join. State TTL semantics: left side OnReadAndWrite,
        // right side OnCreateAndWrite.
        // For an inner join, change "left join" to "join".
        Table table = tableEnv.sqlQuery(
                "select t1.id,t1.name,t2.sex from t1 left join t2 on t1.id=t2.id");

        // Create the upsert-kafka sink table; the PRIMARY KEY lets the
        // retracting left-join stream be encoded as upserts/tombstones.
        tableEnv.executeSql("create table upsert_kafka_table(" +
                "id string," +
                "name string," +
                "sex string," +
                "primary key (id) not enforced)" + KafkaUtil.getKafkaUpsertSinkDDL("test1"));

        // Use the documented executeInsert API instead of concatenating the
        // Table into a SQL string, which relied on the side effect of
        // Table.toString() registering an anonymous view.
        table.executeInsert("upsert_kafka_table");
    }
}
