package com.atguigu.chapter11;

import com.atguigu.chapter5.source.WaterSensor;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Over;
import org.apache.flink.table.api.OverWindow;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.junit.Before;
import org.junit.Test;

import java.time.Duration;

import static org.apache.flink.table.api.Expressions.*;

/**
 * @ClassName: Flink12_Over_Window_Table
 * @Description: Demonstrates over-windows in the Flink Table API (Java DSL).
 * @Author: kele
 * @Date: 2021/4/13 12:39
 * <p>
 *
 *  1. UNBOUNDED_ROW: row-based window starting from the first row of the partition;
 *     UNBOUNDED_RANGE: time-based window starting from the beginning of the partition.
 *  2. rowInterval(Long n): window starts n rows before the current row.
 *  3. lit(n).second()/minute()/...: window starts the given time interval before the current row.
 *
 **/
public class Flink12_Over_Window_Table {

    private StreamTableEnvironment tenv;
    private StreamExecutionEnvironment env;
    private Table table;

    /**
     * Builds the execution environment and a Table over the sensor file.
     * <p>
     * Reads "in/sensor.txt" as CSV lines ("id,ts,vc"), assigns event-time
     * watermarks with a 5s out-of-orderness bound, and registers the stream
     * as a Table with "ts" declared as the rowtime attribute.
     */
    @Before
    public void before() {

        Configuration conf = new Configuration();
        // Pin the Flink web UI / REST port so repeated local runs are reachable at a fixed address.
        conf.setInteger("rest.port", 20000);

        env = StreamExecutionEnvironment.getExecutionEnvironment(conf);
        tenv = StreamTableEnvironment.create(env);

        // Parallelism 1 keeps the printed output in a single, deterministic stream.
        env.setParallelism(1);

        SingleOutputStreamOperator<WaterSensor> ds =
                env
                        //        .socketTextStream("hadoop162",8888)

                        .readTextFile("in/sensor.txt")
                        .map(line -> {
                            String[] split = line.split(",");
                            // parseLong/parseInt avoid the boxing of Long.valueOf/Integer.valueOf;
                            // NOTE(review): assumes WaterSensor(String, Long/long, Integer/int) — autoboxing covers either.
                            return new WaterSensor(split[0], Long.parseLong(split[1]), Integer.parseInt(split[2]));
                        }).assignTimestampsAndWatermarks(
                        WatermarkStrategy
                                .<WaterSensor>forBoundedOutOfOrderness(Duration.ofSeconds(5))
                                // Source timestamps are in seconds; Flink expects epoch milliseconds.
                                .withTimestampAssigner((element, recordTimestamp) -> element.getTs() * 1000)
                );

        // $("ts").rowtime() promotes the field to an event-time attribute usable in orderBy().
        table = tenv.fromDataStream(ds, $("id"), $("ts").rowtime(), $("vc"));
    }

    /**
     * Runs a per-id running sum of "vc" over an over-window ordered by event time.
     * <p>
     * The commented alternatives show the other preceding() variants; the active
     * window covers the 2 seconds (inclusive) before each row.
     */
    @Test
    public void query() {

        // UNBOUNDED_ROW: row-based, from the first row; UNBOUNDED_RANGE: time-based, from the start.
/*        OverWindow overWindow = Over.partitionBy($("id")).orderBy($("ts"))
                                  .preceding(UNBOUNDED_ROW).as("owin");*/

        // Start n rows before the current row (uppercase L — a lowercase 'l' literal reads as '1').
   /*     OverWindow overWindow = Over.partitionBy($("id")).orderBy($("ts"))
                .preceding(rowInterval(1L)).as("owin");*/

        // Time-based window from the beginning of the partition.
   /*     OverWindow overWindow = Over.partitionBy($("id")).orderBy($("ts"))
                .preceding(UNBOUNDED_RANGE).as("owin");*/


        // Window starts 2 seconds before the current row, inclusive of that boundary.
        OverWindow overWindow = Over.partitionBy($("id")).orderBy($("ts"))
                .preceding(lit(2).second()).as("owin");


        table.window(overWindow)
                .select($("id"), $("ts"), $("vc").sum().over($("owin")))
                .execute()
                .print();


    }


}
