package com.myflink.day10;

import com.myflink.bean.WaterSensor;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.streaming.api.TimeCharacteristic;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.timestamps.BoundedOutOfOrdernessTimestampExtractor;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.table.api.*;
import org.apache.flink.table.api.java.StreamTableEnvironment;
import org.apache.flink.types.Row;

/**
 * @author Shelly An
 * @create 2020/9/27 14:10
 */
public class SQL_TableAPI_Window {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        // All windows below are event-time based, driven by the watermark assigner.
        env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);

        // Read sensor records ("id,ts,vc" CSV lines) and map to WaterSensor POJOs.
        DataStreamSource<String> fileDS = env.readTextFile("input/sensor-data.log");
        SingleOutputStreamOperator<WaterSensor> sensorDS = fileDS
                .map(new MapFunction<String, WaterSensor>() {
                    @Override
                    public WaterSensor map(String value) throws Exception {
                        String[] datas = value.split(",");
                        return new WaterSensor(datas[0], Long.valueOf(datas[1]), Integer.valueOf(datas[2]));
                    }
                })
                // Bounded-out-of-orderness watermarks: tolerate 3s of lateness.
                .assignTimestampsAndWatermarks(
                        new BoundedOutOfOrdernessTimestampExtractor<WaterSensor>(Time.seconds(3)) {
                            @Override
                            public long extractTimestamp(WaterSensor element) {
                                // ts is in seconds; Flink timestamps are epoch millis.
                                return element.getTs() * 1000L;
                            }
                        }
                );

        /*---------------------------------------------------------------------------------------*/
        // 1. Create the table execution environment.

        EnvironmentSettings settings = EnvironmentSettings.newInstance()
                .useOldPlanner()   // legacy (pre-Blink) planner
                .inStreamingMode() // streaming mode (batch also available)
                .build();

        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env, settings);
        /**
         * Group Windows with the Table API:
         *
         * 1. Declare the time attribute in the field list (rowtime for event time,
         *    proctime for processing time); it may be aliased.
         * 2. Call window() on the table:
         *   2.1 choose the window type: Tumble, Slide or Session
         *   2.2 set the window parameters: over("xxx.minutes") = size,
         *       every("xxx.minutes") = slide
         *   2.3 on(...) = the time attribute declared as rowtime/proctime
         *   2.4 as(...) = an alias for the window
         * 3. The window alias MUST appear in groupBy(), otherwise it has no effect.
         * 4. w.start / w.end expose the window's start and end timestamps.
         */
        tableEnv.createTemporaryView("sensorTable", sensorDS, "id,ts.rowtime as rt,vc");

        Table sensorTable = tableEnv.from("sensorTable");

        // Tumbling event-time window of 5s via the Table API.
        Table resultTable = sensorTable
                .window(Tumble.over("5.seconds").on("rt").as("w"))
                // groupBy must include the window alias for the window to apply
                .groupBy("id,w")
                // id.count == count(id)
                .select("id,id.count,w.start,w.end");

        tableEnv.toRetractStream(resultTable, Row.class).print("GroupWindow Tumble");


        // Sliding event-time window: size 5s, slide 3s.
        Table result1Table = sensorTable
                .window(Slide.over("5.seconds").every("3.seconds").on("rt").as("w"))
                // groupBy must include the window alias for the window to apply
                .groupBy("id,w")
                // id.count == count(id)
                .select("id,id.count,w.start,w.end");

        tableEnv.toRetractStream(result1Table, Row.class).print("GroupWindow Slide");


        /**
         * Over Windows: one aggregate value per input row over a preceding range.
         */

        // Table API: unbounded range over all preceding rows per key.
        Table result2Table = sensorTable
                .window(
                        Over
                                .partitionBy("id")
                                .orderBy("rt")
                                // FIX: was the misspelled "UNBOUNED_RANGE", which the
                                // expression parser rejects at runtime.
                                .preceding("UNBOUNDED_RANGE")
                                .following("CURRENT_RANGE")
                                .as("ow"))
                .select("id,count(id) over ow");

        tableEnv.toRetractStream(result2Table, Row.class).print("OverWindow TableAPI");


        // SQL: row-count over window — the last 2 rows plus the current row per key.
        // FIX: "order by" was missing its time attribute ("order by rows between ...");
        // streaming over aggregations must be ordered on the rowtime attribute.
        Table result3Table = tableEnv.sqlQuery("select id," +
                "count(id) over (partition by id order by rt rows between 2 preceding and current row)" +
                " from sensorTable");

        tableEnv.toRetractStream(result3Table, Row.class).print("OverWindow SQL API");

        env.execute();
    }
}
