package com.alison.datastream.chapter5_window;

import com.alison.tableapisql.chapter1_tableapiandsql.model.SensorReading;
import com.google.common.collect.Maps;
import org.apache.flink.api.common.functions.AggregateFunction;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.windowing.assigners.TumblingProcessingTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;

import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

/**
 * Flink windowing demo: reads comma-separated sensor readings from a socket,
 * keys the stream by sensor id, and uses a 15-second tumbling processing-time
 * window with an incremental {@link AggregateFunction} to count readings per
 * sensor id within each window.
 *
 * <p>Expected input line format: {@code id,timestamp,temperature}
 * (e.g. {@code sensor_1,1547718199,35.8}).
 */
public class E1_WindowTest1_TimeWindow {
    public static void main(String[] args) throws Exception {

        // Create the execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Parallelism 1 so the printed output is easy to follow.
        env.setParallelism(1);

        //        // Read data from a file instead:
        //        DataStream<String> dataStream = env.readTextFile("/tmp/Flink_Tutorial/src/main/resources/sensor.txt");

        // Read data from a socket text stream.
        DataStream<String> inputStream = env.socketTextStream("192.168.56.101", 7777);
        // Parse each line into a SensorReading.
        // Use parseLong/parseDouble instead of the deprecated boxing
        // constructors new Long(...) / new Double(...).
        DataStream<SensorReading> dataStream = inputStream.map(line -> {
            String[] fields = line.split(",");
            return new SensorReading(fields[0], Long.parseLong(fields[1]), Double.parseDouble(fields[2]));
        });

        // Windowing test
        // 1. Incremental aggregate function (simply counts readings per key in each window).
        DataStream<Map<String, Integer>> resultStream = dataStream.keyBy(SensorReading::getId)
                //                .countWindow(10, 2);
                //                .window(EventTimeSessionWindows.withGap(Time.minutes(1)));
                //                .window(TumblingProcessingTimeWindows.of(Time.seconds(15)))
                //                .timeWindow(Time.seconds(15)) // deprecated, avoid
                .window(TumblingProcessingTimeWindows.of(Time.seconds(15)))
                .aggregate(new AggregateFunction<SensorReading, Map<String, Integer>, Map<String, Integer>>() {
                    // Create a fresh accumulator for a new window.
                    @Override
                    public Map<String, Integer> createAccumulator() {
                        return new HashMap<>();
                    }

                    // Add one element: increment the count for its sensor id.
                    @Override
                    public Map<String, Integer> add(SensorReading sensorReading, Map<String, Integer> accumulator) {
                        accumulator.merge(sensorReading.getId(), 1, Integer::sum);
                        return accumulator;
                    }

                    // Emit the accumulated counts as the window result.
                    @Override
                    public Map<String, Integer> getResult(Map<String, Integer> accumulator) {
                        return accumulator;
                    }

                    // Merge two partial accumulators by summing per-key counts.
                    // (Not invoked for tumbling windows; required for session windows.)
                    @Override
                    public Map<String, Integer> merge(Map<String, Integer> acc1, Map<String, Integer> acc2) {
                        Map<String, Integer> resultAcc = new HashMap<>(acc1);
                        acc2.forEach((k, v) -> resultAcc.merge(k, v, Integer::sum));
                        return resultAcc;
                    }

                });
        resultStream.print("result");
        env.execute();
    }
}