package com.atguigu.chapter11;

import com.atguigu.chapter05.WaterSensor;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

import java.time.Duration;

import static org.apache.flink.table.api.Expressions.$;

/**
 * Flink SQL OVER window on event time: demonstrates a named window
 * (WINDOW clause) that is defined once and reused by several
 * aggregations, since Flink SQL forbids multiple distinct inline
 * over() windows in a single query.
 *
 * @author cjp
 * @version 1.0
 * @date 2021/3/12 9:30
 */
public class Flink20_SQL_OverWindow_EventTime {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // Source: socket text stream of "id,ts,vc" lines -> WaterSensor,
        // with event-time watermarks tolerating up to 3s of out-of-orderness.
        SingleOutputStreamOperator<WaterSensor> sensorDS = env
                .socketTextStream("localhost", 9999)
                .map(new MapFunction<String, WaterSensor>() {
                    @Override
                    public WaterSensor map(String value) throws Exception {
                        // Split the CSV line "id,ts,vc"
                        String[] line = value.split(",");
                        return new WaterSensor(line[0], Long.parseLong(line[1]), Integer.parseInt(line[2]));
                    }
                })
                .assignTimestampsAndWatermarks(
                        WatermarkStrategy
                                .<WaterSensor>forBoundedOutOfOrderness(Duration.ofSeconds(3))
                                // ts field is in seconds; Flink event timestamps are milliseconds
                                .withTimestampAssigner((value, ts) -> value.getTs() * 1000L)
                );


        // TODO - SQL OverWindow (analogous to Hive SQL's
        //        over(partition by ... order by ... preceding/following))
        // 1. Create the table execution environment
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);
        // 2. Register the stream as a view; "et" is the event-time (rowtime) attribute
        tableEnv.createTemporaryView("sensor", sensorDS,$("id"),$("ts"),$("vc"),$("et").rowtime());

        tableEnv
//                .sqlQuery("select " +
//                        "id," +
//                        "sum(vc) over(partition by id order by et rows between 1 preceding and current row) as vcSum " +
//                        "from sensor")
                // Flink SQL does not support several distinct inline over() windows in
                // one query, but a named window (WINDOW clause) can be defined once and
                // referenced by multiple aggregations.
                .sqlQuery("select " +
                        "id," +
                        "sum(vc) over ow as vcSum," +
                        "count(id) over ow as idCount " +
                        "from sensor " +
                        "window ow as (partition by id order by et rows between 1 preceding and current row)")
                .execute()
                .print();

        // NOTE: TableResult.execute().print() above already submits the Table API job
        // and blocks consuming results from the unbounded socket source. A trailing
        // env.execute() is therefore unreachable in practice, and if it ever ran it
        // would fail with "No operators defined in the streaming topology" because no
        // DataStream sinks exist — so it has been removed.
    }
}

/**
 */
