package tableapi;

import bean.SensorReading;
import org.apache.flink.streaming.api.TimeCharacteristic;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.timestamps.BoundedOutOfOrdernessTimestampExtractor;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.Over;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.Tumble;
import org.apache.flink.table.api.java.StreamTableEnvironment;
import org.apache.flink.table.descriptors.Csv;
import org.apache.flink.table.descriptors.Kafka;
import org.apache.flink.table.descriptors.Schema;
import org.apache.flink.types.Row;

/**
 * @Description: Flink Table API & SQL demo: event-time attributes, tumbling
 *               group windows and over windows on a sensor-reading stream.
 *               (contact QQ1667847363)
 * @author: xiao kun tai
 * @date:2021/11/8 22:06
 */
public class Table5_TimeAndWindow {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // Drive window operators by record timestamps rather than wall-clock time.
        env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);

        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        String inputPath = "src/main/resources/sensor.txt";

        DataStream<String> inputStream = env.readTextFile(inputPath);

        // Parse "id,timestamp,temperature" CSV lines into SensorReading beans.
        // Long.parseLong/Double.parseDouble replace the deprecated boxing
        // constructors new Long(...)/new Double(...).
        DataStream<SensorReading> dataStream = inputStream.map(line -> {
            String[] fields = line.split(",");
            return new SensorReading(fields[0], Long.parseLong(fields[1]), Double.parseDouble(fields[2]));
        })
                // Watermarks with a 2-second bounded out-of-orderness allowance.
                .assignTimestampsAndWatermarks(new BoundedOutOfOrdernessTimestampExtractor<SensorReading>(Time.seconds(2)) {
                    @Override
                    public long extractTimestamp(SensorReading sensorReading) {
                        // Source timestamps are in seconds; Flink expects milliseconds.
                        return sensorReading.getTimestamp() * 1000L;
                    }
                });

        /**
         * Convert the stream into a table, declaring the time attribute.
         * "rt.rowtime" exposes an event-time attribute; the commented-out
         * variant would use a processing-time attribute ("pt.proctime") instead.
         */
//        Table dataTable = tableEnv.fromDataStream(dataStream, "id,timestamp as ts,temperature as temp,pt.proctime");
        Table dataTable = tableEnv.fromDataStream(dataStream, "id,timestamp as ts,temperature as temp,rt.rowtime");


        tableEnv.createTemporaryView("sensor", dataTable);

        /**
         * Window operations.
         * Group Window (10-second tumbling window) — Table API.
         */
        Table resultTable = dataTable.window(Tumble.over("10.seconds")
                .on("rt")
                .as("tw")
        )
                .groupBy("id,tw")
                .select("id,id.count,temp.avg,tw.end");

        // SQL equivalent. Note the trailing space before "from": without it the
        // concatenated query would read "...second)from sensor" and fail to parse.
        Table resultSqlTable = tableEnv.sqlQuery("select id,count(id) as cnt,avg(temp) as avgTemp,tumble_end(rt,interval '10' second) " +
                "from sensor group by id,tumble(rt,interval '10' second)");


        /**
         * Over Window (per id, ordered by event time, 2 preceding rows)
         * Table API.
         */
        Table overResultTable = dataTable.window(Over
                .partitionBy("id")
                .orderBy("rt")
                .preceding("2.rows")
                .as("ow")
        )
                .select("id,rt,id.count over ow,temp.avg over ow");

        // SQL equivalent of the over window.
        Table overSqlTable = tableEnv.sqlQuery("select id,rt,count(id) over ow," +
                "avg(temp) over ow from sensor " +
                "window ow as (partition by id order by rt rows between 2 preceding and current row)");

//        dataTable.printSchema();
//        tableEnv.toAppendStream(dataTable, Row.class).print();


//        tableEnv.toAppendStream(resultTable, Row.class).print("result");
//        tableEnv.toRetractStream(resultSqlTable, Row.class).print("sql");

//        tableEnv.toAppendStream(overResultTable, Row.class).print("result");
        // Over-window aggregates append per input row; retract stream also works.
        tableEnv.toRetractStream(overSqlTable, Row.class).print("sql");


        env.execute();
    }
}
