package com.itqiqi.api.tableapi;

import com.itqiqi.api.pojo.SensorReading;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.streaming.api.TimeCharacteristic;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.timestamps.BoundedOutOfOrdernessTimestampExtractor;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.table.api.GroupWindowedTable;
import org.apache.flink.table.api.Over;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.Tumble;
import org.apache.flink.table.api.java.StreamTableEnvironment;
import org.apache.flink.types.Row;

/**
 * Flink Table API / SQL demo: event-time handling with tumbling group windows
 * and over (row-based) windows, expressed both in the fluent Table API and in SQL.
 *
 * <p>Reads CSV sensor records ({@code id,timestamp,temperature}) from
 * {@code input/sensor.txt}, assigns event-time timestamps with a 1-second
 * bounded-out-of-orderness watermark, then builds four equivalent aggregations.
 */
public class TableTest5_TimeAndWindow {

    public static void main(String[] args) throws Exception {

        // Set up the streaming environment; parallelism 1 keeps output ordering
        // deterministic for this demo, and EventTime enables rowtime semantics below.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);

        // Create the table environment on top of the streaming environment.
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // Read raw CSV lines from the sample file.
        DataStreamSource<String> inputStream = env.readTextFile("input/sensor.txt");

        SingleOutputStreamOperator<SensorReading> dataStream = inputStream.map(new MapFunction<String, SensorReading>() {
            @Override
            public SensorReading map(String s) throws Exception {
                String[] fields = s.split(",");
                // parseLong/parseDouble instead of the deprecated boxing
                // constructors new Long(...)/new Double(...): no needless boxing.
                return new SensorReading(fields[0], Long.parseLong(fields[1]), Double.parseDouble(fields[2]));
            }
        }).assignTimestampsAndWatermarks(new BoundedOutOfOrdernessTimestampExtractor<SensorReading>(Time.seconds(1)) {
            @Override
            public long extractTimestamp(SensorReading element) {
                // Source timestamps are in seconds; Flink expects epoch millis.
                return element.getTimestamp() * 1000L;
            }
        });

        // Convert the stream to a table, declaring the event-time attribute:
        // "timestamp.rowtime as rt" exposes the record timestamp as rowtime column rt.
        Table sensorTable = tableEnv.fromDataStream(dataStream, "id, timestamp.rowtime as rt, temperature as temp");

        // Group window (Table API): 10-second tumbling window on the rowtime attribute.
        Table resTable = sensorTable.window(Tumble.over("10.seconds").on("rt").as("tw"))
                .groupBy("id, tw")
                .select("id, id.count, temp.avg, tw.end");

        // Group window (SQL): register the table, then express the same tumble in SQL.
        tableEnv.createTemporaryView("sensorTable", sensorTable);

        Table sqlTable = tableEnv.sqlQuery("select id, count(id) as cnt, avg(temp) as avgTemp, tumble_end(rt, interval '10' second)" +
                " from sensorTable group by id, tumble(rt, interval '10' second)");

        // Over window (Table API): per-id running aggregates over the current row
        // and the 2 preceding rows, ordered by event time.
        Table overTable = sensorTable.window(Over.partitionBy("id").orderBy("rt").preceding("2.rows").as("ow"))
                .select("id, rt, id.count over ow, temp.avg over ow");

        // Over window (SQL). NOTE: when concatenating a multi-line SQL string,
        // every continuation fragment must start with a space, otherwise tokens fuse.
        Table sqlOverTable = tableEnv.sqlQuery("select id, rt, count(id) over ow, avg(temp) over ow" +
                " from sensorTable" +
                " window ow as (partition by id order by rt rows between 2 preceding and current row)");

        sensorTable.printSchema();

//        tableEnv.toAppendStream(resTable, Row.class).print("resTable");
//        tableEnv.toRetractStream(sqlTable, Row.class).print("sqlTable");

        // Over windows emit one result per input row, so append mode suffices for
        // the Table API variant; the SQL variant is printed as a retract stream.
        tableEnv.toAppendStream(overTable, Row.class).print("overTable");
        tableEnv.toRetractStream(sqlOverTable, Row.class).print("sqlOverTable");

        env.execute();

    }
}
