package com.atguigu.flinksql.day12;

import org.apache.flink.api.common.ExecutionConfig;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.TableConfig;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.types.Row;

import java.time.Duration;

/**
 * ClassName: Test02
 * Package: com.atguigu.flinksql.day12
 * Description:
 *            2.写代码实现要求:设置TTL10s,  事件时间语义下  进行LEFT JOIN操作并说明以下输出结果:
 * 	          1.1 左表单独有数据;
 * 	          1.2 10s内右表有左表对应的数据到达;
 * 	          1.3 右表一直输入数据,且间隔时间不超过10s;
 * 	          1.4 右表停止输入数据,超过10s后再次输入数据.
 *
 * 	      测试数据：
 *                      {"id":"1001","name":"刘东","ts":13}
 *                      {"id":"1002","name":"陈俊","ts":14}
 *                      {"id":"1003","name":"花花","ts":15}
 *                      {"id":"1004","name":"海海","ts":16}
 *
 *
 *                      {"id":"1001","high":130.0,"ts":13}
 *                      {"id":"1002","high":140.0,"ts":14}
 *                      {"id":"1003","high":150.0,"ts":15}
 *                      {"id":"1004","high":160.0,"ts":16}
 * @Author ChenJun
 * @Create 2023/4/21 9:00
 * @Version 1.0
 */
public class Test02 {

    public static void main(String[] args) throws Exception {

        // 1. Create the stream execution environment. Parallelism 1 keeps the
        //    printed retract stream in a single, readable order.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // Set the idle-state retention (state TTL) to 10 seconds: join state
        // for a key is dropped once that key has been idle for 10s, which is
        // exactly what the four scenarios in the class Javadoc exercise.
        TableConfig config = tableEnv.getConfig();
        config.setIdleStateRetention(Duration.ofSeconds(10));

        // 2. Create the two Kafka-backed source tables.
        createLeftTable(tableEnv);
        createRightTable(tableEnv);

        // 3. Regular (non-interval) LEFT JOIN, so the 10s state TTL governs
        //    how long each side's rows remain joinable.
        Table table = tableEnv.sqlQuery("" +
                "select \n" +
                "    tl.id , \n" +
                "    name ,\n" +
                "    high \n" +
                "from t2_left tl\n" +
                "left join\n" +
                "   t2_right tr\n" +
                "on tl.id = tr.id");

        // A LEFT JOIN may retract rows it emitted earlier (e.g. an
        // (id, name, NULL) row is withdrawn once the matching right-side row
        // arrives), so a retract stream is required instead of append-only.
        DataStream<Tuple2<Boolean, Row>> tuple2DataStream = tableEnv.toRetractStream(table, Row.class);

        // Print (add=true / retract=false, row) pairs to stdout.
        tuple2DataStream.print();

        // Trigger job execution.
        env.execute();

    }

    /**
     * Left input table: records like {"id":"1001","name":"刘东","ts":13}.
     * Event time is derived from the epoch-second field {@code ts} with a
     * 2-second out-of-orderness watermark.
     */
    private static void createLeftTable(StreamTableEnvironment tableEnv) {
        tableEnv.executeSql(""+
                "CREATE TABLE t2_left( \n" +
                "    id string, \n" +
                "    name string,\n" +
                "    ts  bigint,\n" +
                "    rt AS TO_TIMESTAMP_LTZ(ts,0),\n" +
                "    WATERMARK FOR rt AS rt - INTERVAL '2' SECOND\n" +
                ") WITH (\n" +
                "  'connector' = 'kafka',\n" +
                "  'properties.bootstrap.servers' = 'hadoop102:9092',\n" +
                "  'properties.group.id' = 'test1',\n" +
                "  'scan.startup.mode' = 'group-offsets',\n" +
                "  'sink.partitioner' = 'fixed',\n" +
                "  'topic' = 'test1109',\n" +
                "  'format' = 'json'\n" +
                ")");
    }

    /**
     * Right input table: records like {"id":"1001","high":130.0,"ts":13}.
     *
     * BUGFIX: this table originally read the same topic ('test1109') with the
     * same consumer group ('test1') as the left table. With the json format a
     * missing field simply becomes NULL, so every left record would also show
     * up here (with high = NULL) and vice versa, breaking the LEFT JOIN
     * experiment; and under 'group-offsets' startup the shared group id makes
     * the two sources clobber each other's committed offsets. The right side
     * now has its own topic and group id.
     */
    private static void createRightTable(StreamTableEnvironment tableEnv) {
        tableEnv.executeSql(""+
                "CREATE TABLE t2_right(\n" +
                "\tid string,\n" +
                "\thigh Double,\n" +
                "\tts  bigint,\n" +
                "\trt AS TO_TIMESTAMP_LTZ(ts,0),\n" +
                "    WATERMARK FOR rt AS rt - INTERVAL '2' SECOND\n" +
                ")WITH (\n" +
                "  'connector' = 'kafka',\n" +
                "  'properties.bootstrap.servers' = 'hadoop102:9092',\n" +
                "  'properties.group.id' = 'test2',\n" +
                "  'scan.startup.mode' = 'group-offsets',\n" +
                "  'sink.partitioner' = 'fixed',\n" +
                "  'topic' = 'test1109_right',\n" +
                "  'format' = 'json'\n" +
                ")");
    }
}
