package com.qezhhnjy.flink.job;

import cn.hutool.core.io.FileUtil;
import cn.hutool.core.io.resource.ResourceUtil;
import com.qezhhnjy.flink.bean.SensorReading;
import com.qezhhnjy.flink.mapper.MyRedisMapper;
import com.qezhhnjy.flink.source.MySensor;
import org.apache.flink.api.common.functions.AggregateFunction;
import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.ConnectedStreams;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.streaming.api.functions.co.CoFlatMapFunction;
import org.apache.flink.streaming.api.functions.co.CoMapFunction;
import org.apache.flink.streaming.api.functions.sink.SinkFunction;
import org.apache.flink.streaming.api.windowing.assigners.ProcessingTimeSessionWindows;
import org.apache.flink.streaming.api.windowing.assigners.SessionWindowTimeGapExtractor;
import org.apache.flink.streaming.api.windowing.assigners.SlidingProcessingTimeWindows;
import org.apache.flink.streaming.api.windowing.assigners.TumblingProcessingTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;
import org.apache.flink.streaming.connectors.redis.RedisSink;
import org.apache.flink.streaming.connectors.redis.common.config.FlinkJedisPoolConfig;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;

import java.util.Collections;
import java.util.Objects;
import java.util.concurrent.atomic.AtomicInteger;

/**
 * @author zhaoyangfu
 * @date 2021/8/27-13:51
 */
public class StreamSensor {

    /**
     * Demonstrates rolling operators on a keyed sensor stream
     * (sum/max/maxBy/min/minBy/reduce/filter); currently only the
     * temperature filter (&gt; 10) is active before printing.
     */
    public static void handle() throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        SingleOutputStreamOperator<Tuple3<String, Long, Double>> readings = env
                .addSource(new MySensor())
                .map((MapFunction<SensorReading, Tuple3<String, Long, Double>>) r
                        -> Tuple3.of(r.getName(), r.getTime(), r.getTemp()))
                // the lambda erases the tuple's generics, so declare them explicitly
                .returns(Types.TUPLE(Types.STRING, Types.LONG, Types.DOUBLE));
        readings
                .keyBy((KeySelector<Tuple3<String, Long, Double>, String>) r -> r.f0)
                .filter((FilterFunction<Tuple3<String, Long, Double>>) r -> r.f2 > 10)
                .print();
        env.execute("sensor");
    }

    /**
     * Splits the stream by temperature using side outputs — the replacement for
     * the deprecated split/select API. Readings are routed to one of three
     * tagged streams (&lt;10, 10-20, &gt;20) in a single pass; a filter-based
     * split would have to traverse the whole stream once per branch.
     */
    public static void sideOutput() throws Exception {
        // The anonymous-subclass form (trailing "{}") is required: a plain
        // new OutputTag<>("...") loses the generic type and Flink fails with:
        // "The types of the interface org.apache.flink.util.OutputTag could not be inferred.
        //  Support for synthetic interfaces, lambdas, and generic or raw types is limited at this point"
        OutputTag<SensorReading> low = new OutputTag<SensorReading>("<10") {
        };
        OutputTag<SensorReading> middle = new OutputTag<SensorReading>("10-20") {
        };
        OutputTag<SensorReading> high = new OutputTag<SensorReading>(">20") {
        };

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        SingleOutputStreamOperator<SensorReading> tagged = env
                .addSource(new MySensor())
                .process(new ProcessFunction<SensorReading, SensorReading>() {
                    @Override
                    public void processElement(SensorReading value, Context ctx, Collector<SensorReading> out) throws Exception {
                        double temp = value.getTemp();
                        if (temp < 10) {
                            ctx.output(low, value);
                        } else if (temp <= 20) {
                            // only reached when temp >= 10 (NaN fails this
                            // comparison and falls through to the main output)
                            ctx.output(middle, value);
                        } else if (temp > 20) {
                            ctx.output(high, value);
                        } else {
                            // NaN temperatures stay on the main output
                            out.collect(value);
                        }
                    }
                });

        tagged.getSideOutput(high).print();
        env.execute();
    }

    /**
     * Demonstrates Connect with CoMap and CoFlatMap: two sensor streams become
     * one ConnectedStreams, and each side is turned into a labelled
     * (label, temperature) tuple by its own callback.
     */
    public static void connect() throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        DataStreamSource<SensorReading> first = env.addSource(new MySensor());
        DataStreamSource<SensorReading> second = env.addSource(new MySensor());
        ConnectedStreams<SensorReading, SensorReading> joined = first.connect(second);

        // CoMap: one mapper per input side, unified output type
        joined.map(new CoMapFunction<SensorReading, SensorReading, Tuple2<String, Double>>() {
            @Override
            public Tuple2<String, Double> map1(SensorReading reading) throws Exception {
                return Tuple2.of("map1", reading.getTemp());
            }

            @Override
            public Tuple2<String, Double> map2(SensorReading reading) throws Exception {
                return Tuple2.of("map2", reading.getTemp());
            }
        }).print();

        // CoFlatMap: same idea, but each side may emit zero or more elements
        joined.flatMap(new CoFlatMapFunction<SensorReading, SensorReading, Tuple2<String, Double>>() {
            @Override
            public void flatMap1(SensorReading reading, Collector<Tuple2<String, Double>> out) throws Exception {
                out.collect(Tuple2.of("flatMap1", reading.getTemp()));
            }

            @Override
            public void flatMap2(SensorReading reading, Collector<Tuple2<String, Double>> out) throws Exception {
                out.collect(Tuple2.of("flatMap2", reading.getTemp()));
            }
        }).print();

        env.execute();
    }

    /**
     * Demonstrates union. Connect vs. Union:
     * 1. Union requires both streams to have the same element type; Connect
     *    accepts different types and reconciles them later in the CoMap.
     * 2. Connect joins exactly two streams; Union can join any number.
     */
    public static void union() {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        DataStreamSource<SensorReading> streamSource = env.addSource(new MySensor());
        DataStreamSource<SensorReading> streamSource2 = env.addSource(new MySensor());
        // Bug fix: the method never actually united the streams, so the demo
        // was a no-op. Merge the two same-typed sources and print the result.
        streamSource.union(streamSource2).print();
        // The signature declares no checked exception (unlike the sibling demos),
        // so wrap execute() and rethrow unchecked to keep callers compiling.
        try {
            env.execute("union");
        } catch (Exception e) {
            throw new IllegalStateException("union job failed", e);
        }
    }

    /**
     * "Rich" functions are the enhanced variants of the DataStream API function
     * interfaces; every Flink function class has a Rich version
     * (RichMapFunction, RichFlatMapFunction, RichFilterFunction, ...).
     * Unlike plain functions they can access the runtime context and have
     * lifecycle hooks:
     * <ul>
     *   <li>open()  - initialization, invoked before the operator (e.g. map or filter) runs</li>
     *   <li>close() - last lifecycle call, used for cleanup</li>
     *   <li>getRuntimeContext() - runtime information such as parallelism, task name and state</li>
     * </ul>
     */
    public static void rich() throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.addSource(new MySensor())
                .map(new MyRichMapFun())
                .print();
        env.execute();
    }

    /**
     * Appends each sensor reading (via its toString form) as a UTF-8 line to
     * the classpath resource "hello.txt" using an inline SinkFunction.
     * NOTE(review): writing to a file resolved from the classpath only works
     * while the resource is a real file on disk (it breaks once packaged into
     * a jar), and the resource is re-resolved on every record — confirm this
     * is intended, or resolve the target file once outside the sink.
     */
    public static void sinkToFile() throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        DataStreamSource<SensorReading> streamSource = env.addSource(new MySensor());
        streamSource.addSink(new SinkFunction<SensorReading>() {
            @Override
            public void invoke(SensorReading value, Context context) throws Exception {
                // hutool appends the element's toString as one line, UTF-8 encoded
                FileUtil.appendUtf8Lines(Collections.singletonList(value), ResourceUtil.getResource("hello.txt").getFile());
            }
        });
        env.execute();
    }

    /**
     * Writes sensor readings (stringified via toString) to the Kafka topic
     * "kafka" on localhost:9092 using a simple string schema.
     */
    public static void sinkToKafka() {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        DataStreamSource<SensorReading> streamSource = env.addSource(new MySensor());
        SingleOutputStreamOperator<String> map = streamSource.map(SensorReading::toString);
        map.addSink(new FlinkKafkaProducer<>("localhost:9092", "kafka", new SimpleStringSchema()));
        // Bug fix: without execute() the job graph was only built, never
        // submitted, so nothing was ever produced to Kafka. The signature
        // declares no checked exception, so rethrow unchecked.
        try {
            env.execute("sinkToKafka");
        } catch (Exception e) {
            throw new IllegalStateException("sinkToKafka job failed", e);
        }
    }

    /**
     * Writes sensor readings to a local Redis instance via RedisSink; the
     * command/key/value mapping is defined by {@link MyRedisMapper}.
     * NOTE(review): the Redis password is hard-coded in source — move it to
     * external configuration and out of version control.
     */
    public static void sinkToRedis() throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        DataStreamSource<SensorReading> streamSource = env.addSource(new MySensor());
        // Jedis connection-pool settings for the sink
        FlinkJedisPoolConfig config = new FlinkJedisPoolConfig.Builder()
                .setHost("localhost")
                .setPort(6379)
                .setPassword("aims2016")
                .build();
        streamSource.addSink(new RedisSink<>(config, new MyRedisMapper()));
        env.execute();
    }

    /**
     * Streaming engines are designed for unbounded data sets — data that keeps
     * growing without end — and a window is the mechanism for slicing such an
     * infinite stream into finite chunks for processing.
     * Windows are central to unbounded stream processing: they split the
     * stream into finite-size "buckets" on which computations can run.
     * Two families of windows:
     * <p>
     * CountWindow: triggered by a fixed number of elements, independent of time.
     * TimeWindow: defined by time; by implementation it comes in three flavours:
     * tumbling windows, sliding windows and session windows.
     */
    public static void window() {
        // Intentionally empty: this method only anchors the overview above.
    }

    /**
     * Sums the temperature per sensor over 15-second tumbling
     * processing-time windows and prints the result.
     */
    public static void tumblingTimeWindow() throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.addSource(new MySensor())
                .map((MapFunction<SensorReading, Tuple2<String, Double>>) r -> Tuple2.of(r.getName(), r.getTemp()))
                .returns(Types.TUPLE(Types.STRING, Types.DOUBLE))
                .keyBy(pair -> pair.f0)
                // tumbling (non-overlapping) 15-second windows
                .window(TumblingProcessingTimeWindows.of(Time.seconds(15)))
                .sum(1)
                .print();

        env.execute();
    }

    /**
     * Sums the temperature per sensor over a sliding processing-time window
     * of 15 seconds, evaluated every 5 seconds.
     */
    public static void slidingTimeWindow() throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.addSource(new MySensor())
                .map((MapFunction<SensorReading, Tuple2<String, Double>>) r -> Tuple2.of(r.getName(), r.getTemp()))
                .returns(Types.TUPLE(Types.STRING, Types.DOUBLE))
                .keyBy(pair -> pair.f0)
                // 15-second window, sliding forward every 5 seconds
                .window(SlidingProcessingTimeWindows.of(Time.seconds(15), Time.seconds(5)))
                .sum(1)
                .print("slidingTimeWindow");

        env.execute();
    }

    /**
     * Session-window demo: per-sensor minimum temperature over processing-time
     * sessions whose gap is derived from the data — 2 seconds when the
     * temperature exceeds 10, otherwise 1 second.
     */
    public static void sessionWindow() throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        DataStreamSource<SensorReading> streamSource = env.addSource(new MySensor());
        streamSource.map((MapFunction<SensorReading, Tuple2<String, Double>>) value -> Tuple2.of(value.getName(), value.getTemp()))
                .returns(Types.TUPLE(Types.STRING, Types.DOUBLE))
                .keyBy(d -> d.f0)
                // fixed-gap alternative (2-second session gap):
                // .window(ProcessingTimeSessionWindows.withGap(Time.seconds(2)))
                // dynamic gap derived from the element: temp > 10 -> 2s, otherwise 1s
                .window(ProcessingTimeSessionWindows.withDynamicGap((SessionWindowTimeGapExtractor<Tuple2<String, Double>>) element -> element.f1 > 10 ? 2000 : 1000))
                .min(1)
                // Bug fix: the print label was copy-pasted as "slidingTimeWindow"
                .print("sessionWindow");

        env.execute();
    }

    // Counts how many accumulators Flink creates across all keys/windows.
    public static final AtomicInteger COUNT = new AtomicInteger(0);

    /**
     * Computes the average temperature per sensor over count windows of 100
     * elements. The accumulator is a (sensorName, elementCount, temperatureSum)
     * tuple; COUNT shows that each key partition/window gets its own
     * accumulator instance.
     */
    public static void countWindow() throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        SingleOutputStreamOperator<Tuple2<String, Double>> byName = env
                .addSource(new MySensor())
                .map((MapFunction<SensorReading, Tuple2<String, Double>>) reading
                        -> Tuple2.of(reading.getName(), reading.getTemp()))
                .returns(Types.TUPLE(Types.STRING, Types.DOUBLE));
        byName.keyBy(pair -> pair.f0)
                .countWindow(100)
                .aggregate(new AggregateFunction<Tuple2<String, Double>, Tuple3<String, Integer, Double>, Tuple2<String, Double>>() {
                    @Override
                    public Tuple3<String, Integer, Double> createAccumulator() {
                        // keyBy gives every key partition its own accumulator, so
                        // add() only ever sees elements of a single key
                        System.out.println("创建新的聚合函数" + COUNT.incrementAndGet());
                        return new Tuple3<>("", 0, 0D);
                    }

                    @Override
                    public Tuple3<String, Integer, Double> add(Tuple2<String, Double> element, Tuple3<String, Integer, Double> acc) {
                        // debug trace for the (unexpected) mixed-key case
                        if (!acc.f0.equals("") && !Objects.equals(acc.f0, element.f0)) {
                            System.out.printf("%s==%s", acc.f0, element.f0);
                        }
                        acc.f0 = element.f0;
                        acc.f1 = acc.f1 + 1;
                        acc.f2 = acc.f2 + element.f1;
                        return acc;
                    }

                    @Override
                    public Tuple2<String, Double> getResult(Tuple3<String, Integer, Double> acc) {
                        // average = temperature sum / element count
                        return new Tuple2<>(acc.f0, acc.f2 / acc.f1);
                    }

                    @Override
                    public Tuple3<String, Integer, Double> merge(Tuple3<String, Integer, Double> a, Tuple3<String, Integer, Double> b) {
                        if (!Objects.equals(a.f0, b.f0)) {
                            System.out.printf("a:%s,b:%s", a.f0, b.f0);
                        }
                        return new Tuple3<>(a.f0, a.f1 + b.f1, a.f2 + b.f2);
                    }
                })
                .print("countWindow");

        env.execute();
    }

    /**
     * Rich map function that extracts the temperature from a
     * {@link SensorReading}. The previous open()/close() overrides only
     * delegated to super and were removed; override them (and use
     * getRuntimeContext()) only when real setup/teardown is needed.
     */
    public static class MyRichMapFun extends RichMapFunction<SensorReading, Double> {

        /**
         * @param sensorReading the incoming reading
         * @return the reading's temperature
         */
        @Override
        public Double map(SensorReading sensorReading) throws Exception {
            return sensorReading.getTemp();
        }
    }
}
