package com.atguigu.app;

import com.atguigu.bean.WaterSensor;
import com.atguigu.bean.WaterSensor2;
import com.atguigu.utils.MyKafkaUtil;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

import java.time.Duration;

/**
 * @className: Test6_Upsert_Kafka
 * @author: LinCong
 * @description: Joins two socket streams with a SQL full join and writes the result to Kafka via the upsert-kafka connector.
 * @date: 2023/1/30 11:34
 * @version: 1.0
 */
public class Test6_Upsert_Kafka {
    public static void main(String[] args) {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // Join state never expires by default (prints Duration.ZERO); cap idle state
        // at 10 seconds so state for keys that stop arriving is eventually cleaned up.
        System.out.println(tableEnv.getConfig().getIdleStateRetention());
        tableEnv.getConfig().setIdleStateRetention(Duration.ofSeconds(10));

        // Stream 1 — lines like "1001,23.6,1324" -> WaterSensor(id, vc, ts)
        SingleOutputStreamOperator<WaterSensor> waterSensorDS1 = env.socketTextStream("hadoop3-01", 1111)
                .map(line -> {
                    String[] split = line.split(",");
                    return new WaterSensor(split[0],
                            Double.parseDouble(split[1]),
                            Long.parseLong(split[2]));
                });

        // Stream 2 — lines like "1001,kkk,1324" -> WaterSensor2(id, name, ts)
        SingleOutputStreamOperator<WaterSensor2> waterSensorDS2 = env.socketTextStream("hadoop3-01", 2222)
                .map(line -> {
                    String[] split = line.split(",");
                    return new WaterSensor2(split[0],
                            split[1],
                            Long.parseLong(split[2]));
                });

        // Register both streams as dynamic tables.
        tableEnv.createTemporaryView("t1", waterSensorDS1);
        tableEnv.createTemporaryView("t2", waterSensorDS2);

        // Full outer join. Alias every column explicitly so the view schema matches the
        // sink table by name, instead of relying on Flink's auto-generated names for the
        // duplicate "id" columns (id, id0) plus positional matching.
        Table resultTable = tableEnv.sqlQuery(
                "select t1.id as t1_id, t1.vc as vc, t2.id as t2_id, t2.name as name " +
                "from t1 full join t2 on t1.id = t2.id");
        tableEnv.createTemporaryView("result_table", resultTable);

        // Upsert-Kafka sink table (the connector requires a primary key).
        // NOTE(review): with a FULL join, t1_id is NULL for rows that exist only on the
        // t2 side, which breaks the declared key — confirm whether the key should be
        // COALESCE(t1.id, t2.id) instead.
        tableEnv.executeSql("create table upsert_test(" +
                " t1_id string," +
                " vc double," +
                " t2_id string," +
                " name string," +
                " PRIMARY KEY (t1_id) NOT ENFORCED ) " + MyKafkaUtil.getUpsertKafkaDDL("test"));

        // Submit the streaming insert job (executeSql on an INSERT triggers execution;
        // no env.execute() needed). Columns are listed explicitly rather than "select *".
        tableEnv.executeSql("insert into upsert_test(t1_id, vc, t2_id, name) " +
                        "select t1_id, vc, t2_id, name from result_table")
                .print();

    }
}
