package timeandwindow;

import com.atguigu.pojo.Event;
import com.atguigu.pojo.OrderDetailEvent;
import com.atguigu.pojo.OrderEvent;
import com.atguigu.pojo.UrlViewCount;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.AggregateFunction;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.windowing.ProcessWindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;

import java.time.Duration;
import java.util.HashSet;

/*
 * Unions two streams and prints the result.
 * One of the two streams advances its watermark very slowly; withIdleness is
 * configured so that a source whose watermark stalls is marked idle and no
 * longer holds back the combined watermark.
 *
 * Test: simulate the two streams via socket inputs.
 *
 * */

/**
 * Demonstrates {@code WatermarkStrategy.withIdleness}: two socket-backed event
 * streams are unioned, and each source is declared idle after 10 seconds of
 * silence so a stalled source does not hold back the combined watermark.
 * The unioned stream is then aggregated per URL in 10-second event-time
 * tumbling windows, counting clicks per URL.
 *
 * <p>Input line format on both sockets: {@code user,url,timestampMillis}.
 */
public class Flink13_withIdleness {
    public static void main(String[] args) {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // Emit watermarks automatically once per second.
        env.getConfig().setAutoWatermarkInterval(1000L);

        // Stream 1: order stream, read from socket port 9999.
        SingleOutputStreamOperator<Event> ds1 = env.socketTextStream("hadoop102", 9999).name("sk9999")
                .map(
                        line -> {
                            String[] fields = line.split(",");
                            return new Event(fields[0].trim(), fields[1].trim(), Long.valueOf(fields[2].trim()));
                        }
                ).assignTimestampsAndWatermarks(
                        WatermarkStrategy.<Event>forBoundedOutOfOrderness(Duration.ZERO) // no out-of-orderness tolerance
                                .withTimestampAssigner(
                                        (event, ts) -> event.getTs()
                                )
                                // Declare this source idle after 10s of silence so its
                                // stalled watermark is ignored by downstream operators.
                                .withIdleness(Duration.ofSeconds(10))
                );
        ds1.print("ds1");

        // Stream 2: order-detail stream, read from socket port 8888.
        // BUGFIX: operator name was a copy-paste duplicate ("sk9999"); use "sk8888".
        SingleOutputStreamOperator<Event> ds2 = env.socketTextStream("hadoop102", 8888).name("sk8888")
                .map(
                        line -> {
                            String[] fields = line.split(",");
                            return new Event(fields[0].trim(), fields[1].trim(), Long.valueOf(fields[2].trim()));
                        }
                ).assignTimestampsAndWatermarks(
                        WatermarkStrategy.<Event>forBoundedOutOfOrderness(Duration.ZERO) // no out-of-orderness tolerance
                                .withTimestampAssigner(
                                        (event, ts) -> event.getTs()
                                )
                                .withIdleness(Duration.ofSeconds(10))
                );
        ds2.print("ds2");

        // Union requires both streams to share the same element type.
        DataStream<Event> unionDs = ds1.union(ds2);

        // Requirement: count clicks per URL in every 10-second window.
        // Window choice: keyed, event-time tumbling window.
        unionDs
                .keyBy(event -> event.getUrl())
                .window(
                        TumblingEventTimeWindows.of(Time.seconds(10))
                )
                .aggregate(
                        // Incremental aggregation: accumulate a count per key/window.
                        new AggregateFunction<Event, UrlViewCount, UrlViewCount>() {

                            @Override
                            public UrlViewCount createAccumulator() {
                                // url and window bounds are filled in later by the
                                // ProcessWindowFunction, which has the window context.
                                return new UrlViewCount(null, null, null, 0L);
                            }

                            @Override
                            public UrlViewCount add(Event event, UrlViewCount urlViewCount) {
                                urlViewCount.setCount(urlViewCount.getCount() + 1);
                                return urlViewCount;
                            }

                            @Override
                            public UrlViewCount getResult(UrlViewCount urlViewCount) {
                                return urlViewCount;
                            }

                            @Override
                            public UrlViewCount merge(UrlViewCount acc, UrlViewCount other) {
                                // BUGFIX: previously returned null, which would NPE if
                                // Flink ever merged accumulators (e.g. session windows).
                                // Combine by summing the two partial counts.
                                acc.setCount(acc.getCount() + other.getCount());
                                return acc;
                            }
                        },
                        // Full-window function: enrich the aggregated result with the
                        // key and the window's start/end timestamps.
                        new ProcessWindowFunction<UrlViewCount, UrlViewCount, String, TimeWindow>() {
                            @Override
                            public void process(String key, ProcessWindowFunction<UrlViewCount, UrlViewCount, String, TimeWindow>.Context context, Iterable<UrlViewCount> elements, Collector<UrlViewCount> out) throws Exception {

                                // With an incremental aggregate upstream, elements holds
                                // exactly one value: the final accumulator for this window.
                                UrlViewCount urlViewCount = elements.iterator().next();
                                urlViewCount.setUrl(key);
                                urlViewCount.setWindowStart(context.window().getStart());
                                urlViewCount.setWindowEnd(context.window().getEnd());

                                out.collect(urlViewCount);
                            }
                        }
                )
                .print();

        try {
            env.execute();
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }
}
