package example.stream;


import org.apache.flink.api.common.functions.*;
import org.apache.flink.api.common.state.ListState;
import org.apache.flink.api.common.state.ListStateDescriptor;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.java.tuple.Tuple;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.TimeCharacteristic;
import org.apache.flink.streaming.api.checkpoint.ListCheckpointed;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.KeyedProcessFunction;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.streaming.api.functions.sink.RichSinkFunction;
import org.apache.flink.streaming.api.functions.source.SourceFunction;
import org.apache.flink.streaming.api.functions.timestamps.BoundedOutOfOrdernessTimestampExtractor;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.connectors.elasticsearch.ElasticsearchSinkFunction;
import org.apache.flink.streaming.connectors.elasticsearch.RequestIndexer;
import org.apache.flink.streaming.connectors.redis.common.mapper.RedisCommand;
import org.apache.flink.streaming.connectors.redis.common.mapper.RedisCommandDescription;
import org.apache.flink.streaming.connectors.redis.common.mapper.RedisMapper;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.client.Requests;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class StreamExample {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(4);
        env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime); // use event-time semantics
        // state backend options (memory / filesystem / RocksDB)
//        env.setStateBackend(new MemoryStateBackend());
//        env.setStateBackend(new FsStateBackend(""));
//        env.setStateBackend(new RocksDBStateBackend(""));
        env.enableCheckpointing(300); // enable checkpointing (interval: 300 ms)
        // checkpoint configuration
        CheckpointConfig checkpointConfig = env.getCheckpointConfig();
        checkpointConfig.setCheckpointingMode(CheckpointConfig.DEFAULT_MODE);
        // NOTE(review): the 100 ms timeout is shorter than the 300 ms checkpoint
        // interval, so checkpoints are likely to time out — confirm these values
        checkpointConfig.setCheckpointTimeout(100);
        checkpointConfig.setMaxConcurrentCheckpoints(4); // how many checkpoints may be in flight at once

        // parameter tool
//        ParameterTool parameterTool = ParameterTool.fromArgs(args);
//        parameterTool.get("key");
        /**
         * Sources
         */
        // read a text file
//        DataStream<String> source = env.readTextFile("D:\\Project\\flinkExample\\src\\main\\resources\\name.txt");
        // socket source
        DataStreamSource<String> source = env.socketTextStream("47.95.141.170", 7777);
//        source.flatMap(new MyFlatMap()).keyBy(0).sum(1).print();
        // source from a collection
//        DataStream<People> dataStream = env.fromCollection(Arrays.asList(
//                new People("周周", 18, System.currentTimeMillis()),
//                new People("溜溜", 28, System.currentTimeMillis()),
//                new People("琦琦", 3, System.currentTimeMillis())));

        // source from Kafka
//        Properties properties = new Properties();
//        properties.setProperty("bootstrap.servers", "10.82.82.234:9092");
//        properties.setProperty("group.id", "es.group.id.test");
//        properties.setProperty("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
//        properties.setProperty("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
//        properties.setProperty("auto.offset.reset", "latest");
//        DataStream<String> source = env.addSource(new FlinkKafkaConsumer011<String>("people", new SimpleStringSchema(), properties));

        // custom source
//        DataStream<People> map = env.addSource(new MySourceFunction());

        // parse "name,age,timestamp,sex" lines into People records
        SingleOutputStreamOperator<People> map = source.map(a -> {
            String[] split = a.split(",");
            return new People(split[0], Integer.parseInt(split[1]), Long.parseLong(split[2]), Integer.parseInt(split[3]));
        })
                // assign the event-time field (no lateness; for in-order data)
//                .assignTimestampsAndWatermarks(new AscendingTimestampExtractor<People>() {
//                    @Override
//                    public long extractAscendingTimestamp(People element) {
//                        return element.getTimestamp() * 1000;
//                    }
//                })
                // assign the event-time field (with allowed lateness; for out-of-order data)
                .assignTimestampsAndWatermarks(new BoundedOutOfOrdernessTimestampExtractor<People>(Time.seconds(2)) {
                    @Override
                    public long extractTimestamp(People element) {
                        // * 1000 suggests timestamp is in seconds — TODO confirm
                        return element.getTimestamp() * 1000;
                    }
                });
//        KeyedStream<People, Tuple> age = map.keyBy("sex");
//        KeyedStream<People, Integer> peopleIntegerKeyedStream = map.keyBy(People::getAge);

//        age.max("timestamp").print(); // replaces only timestamp with the group max; other fields (e.g. name) keep their old values
//        age.maxBy("timestamp").print(); // unlike max, the other fields follow too (name comes from the record holding the max timestamp)
//        age.sum("timestamp").print("sum"); // sums the timestamps
//        age.reduce((a, b) -> new People(b.getName(), Math.max(a.getAge(), b.getAge()), b.getTimestamp(), b.getSex())).print();  // per group: max age, plus latest timestamp/name/sex (from the most recent record)

        // split the stream, then fetch sub-streams with select
//        SplitStream<People> split = map.split(a -> (a.getAge() >= 18) ? Collections.singleton("adult") : Collections.singleton("underage"));
//        DataStream<People> adult = split.select("adult");
//        DataStream<People> underage = split.select("underage");
//        DataStream<People> all = split.select("underage", "adult");
//
//        SingleOutputStreamOperator<Tuple2<String, Integer>> adultNameAndAge = adult.map(new MapFunction<People, Tuple2<String, Integer>>() {
//            @Override
//            public Tuple2<String, Integer> map(People people) throws Exception {
//                return new Tuple2<>(people.getName(), people.getAge());
//            }
//        });
//
//        // connect two streams    ** the two streams don't appear to interact? **
//        ConnectedStreams<Tuple2<String, Integer>, People> connect = adultNameAndAge.connect(underage);
//        connect.map(new CoMapFunction<Tuple2<String, Integer>, People, Object>() {
//            @Override
//            public Object map1(Tuple2<String, Integer> value) throws Exception {
//                return new Tuple2<>(value.f0, value.f1);
//            }
//
//            @Override
//            public Object map2(People value) throws Exception {
//                return new Tuple3<>(value.getName(), value.getAge(), value.getTimestamp());
//            }
//        }).print();
//        // union can merge several streams (all must share the same type)
//        adult.union(underage,all).print("union");

        // lifecycle ("rich") functions
//        map.map(new MyRichMap()).print("rick map");
        // repartitioning: global() routes everything to a single subtask
//        map.global().print();

//        SingleOutputStreamOperator<String> stringMap = source.map(a -> {
//            String[] split = a.split(",");
//            return new People(split[0], Integer.parseInt(split[1]), Long.parseLong(split[2]), Integer.parseInt(split[3])).toString();
//        });
        /**
         * Sinks
         */
//        //sink - kafka
//        stringMap.addSink(new FlinkKafkaProducer011<String>("localhost:9002", "sink_topic", new SimpleStringSchema()));
        //sink - redis
//        FlinkJedisPoolConfig config = new FlinkJedisPoolConfig.Builder()
//                .setHost("172.172.172.95")
//                .setPort(6379)
//                .setDatabase(15)
//                .build();
//        map.addSink(new RedisSink<>(config, new MyRedisMapper()));
        //es -> sink
//        List<HttpHost> httpHosts = new ArrayList<>();
//        httpHosts.add(new HttpHost("10.108.36.39", 9200));
//        ElasticsearchSink<People> build = new ElasticsearchSink.Builder<>(httpHosts, new MyEsSinkFunction()).build();
//        map.addSink(build);

        //jdbc -> sink
//        map.addSink(new MyJdbcSinkFunction());
        /**
         * Windows
         */
//        map.keyBy("age")
        // tumbling time window (single size; the original label had tumbling/sliding swapped)
//                .timeWindow(Time.seconds(15)) //same as the next line
//                .window(TumblingProcessingTimeWindows.of(Time.seconds(15))) //same as the previous line
        // sliding time window (size + slide)
        //        .timeWindow(Time.seconds(15),Time.seconds(5))//same as the next line
        //        .window(SlidingProcessingTimeWindows.of(Time.seconds(15),Time.seconds(5))) //same as the previous line
        // tumbling count window: countWindow(size)
//                    .countWindow
        // sliding count window: countWindow(size, slide)
//                .countWindow(15,5);

        // incremental aggregation with aggregate()
//        map.keyBy("age")
//                .timeWindow(Time.seconds(15))
//                .aggregate(new MyAggregateFunction()).print();
        // full-window functions buffer elements and process them all when the window fires: process()/apply()
//        map.keyBy("age")
//                .timeWindow(Time.seconds(15))
//                .apply((WindowFunction<People, Tuple3<Integer, Long, Integer>, Tuple, TimeWindow>) (tuple, window, input, out) -> {
//                    List list = IteratorUtils.toList(input.iterator());
//                    list.forEach(System.out::println);
//                    Tuple3<Integer,Long,Integer> tuple3 = new Tuple3<>();
//                    tuple3.f0 =(Integer) ((Tuple1)tuple).f0;
//                    tuple3.f1 = window.getEnd();
//                    tuple3.f2 = list.size();
//                    out.collect(tuple3);
//                }).print();
        // sliding count window: average of the timestamp field per age group
//        map.keyBy("age")
//                .countWindow(10, 2)
//                .aggregate(new MyAvgAggregateFunction()).print()
//        ;

        // handling late data (only meaningful under event time): allowedLateness(); sideOutputLateData()
//        OutputTag<People> late = new OutputTag<>("late");
//        SingleOutputStreamOperator<People> sum = map.keyBy("age")
//                .timeWindow(Time.seconds(10))
//                .allowedLateness(Time.minutes(1))  // keep the window open 1 extra minute; late data within that time updates the result immediately
//                .sideOutputLateData(late) // after the lateness expires the window closes; later data goes to this side output
//                .sum("timestamp");
//        sum.getSideOutput(late); // fetch the side-output stream
        /**
         * Time semantics & watermarks
         *
         * event time      - when the data was produced
         * ingestion time  - when the data entered Flink
         * processing time - when an operator processes the data
         */

//        map.keyBy("sex")
//                .timeWindow(Time.seconds(10))
//                .min("age").print();

        /**
         * State management
         */
        // operator state (not key-aware)
//        map.map(new MyMapFunction()).print();
        // keyed state (counts separately for each keyBy group)
//        map.keyBy("sex")
//                .map(new MyKeyControllerMapFunction()).print();
        // alert when two consecutive people of the same sex differ in age by more than <10>
//        map.keyBy("sex").map(new MyBusinessMapFunction()).print();
        // only alert records wanted, no normal output — use flatMap
//        map.keyBy("sex").flatMap(new MyBusinessFlatMapFunction(10)).print();
        /**
         * Process functions
         */
        // alert when the age rises continuously for 10 s
//        map.keyBy("sex").process(new MyProcessFunction(10L)).print();
        // side outputs: adults on the main stream, minors on the side output
        OutputTag<People> outputTag = new OutputTag<People>("underline") {
        };
        SingleOutputStreamOperator<People> process =
                map.process(new ProcessFunction<People, People>() {
                                @Override
                                public void processElement(People value, Context ctx, Collector<People> out) throws Exception {
                                    if (value.getAge() > 17) {
                                        out.collect(value);
                                    } else {
                                        ctx.output(outputTag, value);
                                    }
                                }
                            }
                );
        process.getSideOutput(outputTag).print("underline");
        process.print("adult");
        env.execute("example job"); //job name
    }

    // Custom source: emits up to 100 random People records, one per second.
    public static class MySourceFunction implements SourceFunction<People> {
        // emitted-record counter; the original never incremented it, so
        // "while (a < 100)" looped forever
        int a = 0;
        // cancellation flag: volatile because cancel() is called from a
        // different thread than run()
        private volatile boolean running = true;

        @Override
        public void run(SourceContext<People> sourceContext) throws Exception {
            while (running && a < 100) {
                People people = new People("周周", (int) (Math.random() * 10), System.currentTimeMillis(), 1);
                System.out.println("source->" + people);
                Thread.sleep(1000L);
                sourceContext.collect(people);
                a++; // advance the counter so the source terminates after 100 records
            }
        }

        @Override
        public void cancel() {
            // signal run() to exit its loop; previously this only logged and
            // the source could never be stopped
            running = false;
            System.out.println("cancel---");
        }
    }

    /**
     * Splits a comma-separated line and emits one (token, 1) pair per token,
     * the classic word-count preprocessing step.
     */
    public static class MyFlatMap implements FlatMapFunction<String, Tuple2<String, Integer>> {
        @Override
        public void flatMap(String s, Collector<Tuple2<String, Integer>> collector) throws Exception {
            for (String token : s.split(",")) {
                collector.collect(new Tuple2<>(token, 1));
            }
        }
    }

    // "Rich" function variants can access the runtime context and provide
    // lifecycle hooks (open/close), unlike the plain function interfaces.
    public static class MyRichMap extends RichMapFunction<People, Tuple2<String, Integer>> {

        /**
         * Maps a person to (name, index of the parallel subtask handling it).
         * A stray no-op {@code getRuntimeContext();} statement was removed.
         */
        @Override
        public Tuple2<String, Integer> map(People people) throws Exception {
            return new Tuple2<>(people.getName(), getRuntimeContext().getIndexOfThisSubtask());
        }

        @Override
        public void open(Configuration parameters) throws Exception {
            System.out.println("open------");
        }

        @Override
        public void close() throws Exception {
            System.out.println("close----");
        }
    }

    /**
     * Redis sink mapper: stores each People in the hash "flink_people",
     * using the person's name as the hash field and the People's string
     * form as the value.
     */
    public static class MyRedisMapper implements RedisMapper<People> {

        /** HSET into the hash named "flink_people". */
        @Override
        public RedisCommandDescription getCommandDescription() {
            return new RedisCommandDescription(RedisCommand.HSET, "flink_people");
        }

        /** Hash field: the person's name. */
        @Override
        public String getKeyFromData(People data) {
            return data.getName();
        }

        /** Hash value: the person's toString(). */
        @Override
        public String getValueFromData(People data) {
            return data.toString();
        }
    }

    // Custom Elasticsearch sink: indexes each People as a document in "flink_es".
    public static class MyEsSinkFunction implements ElasticsearchSinkFunction<People> {
        @Override
        public void process(People element, RuntimeContext ctx, RequestIndexer indexer) {
            // flatten the record into the document fields
            Map<String, Object> doc = new HashMap<>();
            doc.put("name", element.getName());
            doc.put("age", element.getAge());
            doc.put("timestamp", element.getTimestamp());
            doc.put("sex", element.getSex());
            // build the request via the ES client API and hand it to the indexer
            indexer.add(Requests.indexRequest().index("flink_es").source(doc));
        }
    }

    // Custom JDBC (MySQL) sink. Each parallel subtask holds its own instance,
    // so each gets its own connection and prepared statements.
    public static class MyJdbcSinkFunction extends RichSinkFunction<People> {
        Connection connection = null;
        PreparedStatement insert = null;
        PreparedStatement update = null;

        @Override
        public void open(Configuration parameters) throws Exception {
            System.out.println("connection--");
            connection = DriverManager.getConnection("jdbc:mysql://192.168.99.82:3306/app_daqs?useUnicode=true&characterEncoding=utf8&useSSL=false&useTimezone=true&serverTimezone=GMT%2B8",
                    "app_daqs",
                    "App_daqs.123");
            insert = connection.prepareStatement("insert into flink_people_test(name,age) value (?,?)");
            update = connection.prepareStatement("update flink_people_test set name = ? where age = ?");
        }

        /** Upsert: try UPDATE first; if no row matched, fall back to INSERT. */
        @Override
        public void invoke(People value, Context context) throws Exception {
            update.setString(1, value.getName());
            update.setInt(2, value.getAge());
            update.execute();
            if (update.getUpdateCount() == 0) {
                insert.setString(1, value.getName());
                insert.setInt(2, value.getAge());
                insert.execute();
            }
        }

        @Override
        public void close() throws Exception {
            // Close statements before the connection (the original closed the
            // connection first, leaking both statements if it threw), and
            // null-guard in case open() failed partway through.
            try {
                if (insert != null) {
                    insert.close();
                }
                if (update != null) {
                    update.close();
                }
            } finally {
                if (connection != null) {
                    connection.close();
                }
            }
        }
    }

    // Incremental window aggregate: counts the elements in a window.
    static class MyAggregateFunction implements AggregateFunction<People, Integer, Integer> {
        /** Start counting from zero. */
        @Override
        public Integer createAccumulator() {
            return 0;
        }

        /** Each incoming element contributes exactly one to the count. */
        @Override
        public Integer add(People value, Integer accumulator) {
            return 1 + accumulator;
        }

        /** The count itself is the window result. */
        @Override
        public Integer getResult(Integer accumulator) {
            return accumulator;
        }

        /** Counts from merged windows simply add up. */
        @Override
        public Integer merge(Integer a, Integer b) {
            return b + a;
        }
    }

    /**
     * Computes the average timestamp per window, tagged with the age of the
     * last element added. The accumulator is (timestamp sum, element count,
     * last seen age).
     *
     * The original stashed the age in an instance field — its own comment
     * noted this is only correct if one subtask keeps one function instance.
     * AggregateFunctions must be stateless (Flink may share or re-create
     * instances across windows/keys), so the age now lives in the accumulator.
     * Callers are unaffected: input and output types are unchanged.
     */
    public static class MyAvgAggregateFunction implements AggregateFunction<People, Tuple3<Double, Integer, Integer>, Tuple2<Integer, Double>> {

        @Override
        public Tuple3<Double, Integer, Integer> createAccumulator() {
            return new Tuple3<>(0.0, 0, 0);
        }

        @Override
        public Tuple3<Double, Integer, Integer> add(People value, Tuple3<Double, Integer, Integer> accumulator) {
            // fold in the timestamp, bump the count, remember the latest age
            return new Tuple3<>(accumulator.f0 + value.getTimestamp(), accumulator.f1 + 1, value.getAge());
        }

        @Override
        public Tuple2<Integer, Double> getResult(Tuple3<Double, Integer, Integer> accumulator) {
            // guard against an empty accumulator to avoid 0.0 / 0 = NaN
            double avg = accumulator.f1 == 0 ? 0.0 : accumulator.f0 / accumulator.f1;
            return new Tuple2<>(accumulator.f2, avg);
        }

        @Override
        public Tuple3<Double, Integer, Integer> merge(Tuple3<Double, Integer, Integer> a, Tuple3<Double, Integer, Integer> b) {
            // keep b's age, matching add()'s "last seen wins" semantics
            return new Tuple3<>(a.f0 + b.f0, a.f1 + b.f1, b.f2);
        }
    }


    /**
     * Map with fault tolerance for operator state: the in-memory count is
     * checkpointed via snapshotState() and restored via restoreState() after
     * a failure (a recovering subtask may receive state from several former
     * subtasks, hence the summation on restore).
     *
     * Each parallel subtask holds its own instance; with parallelism 4 the
     * printed output interleaves per-subtask counters, e.g.:
     * 2> 1   3> 1   4> 1   1> 1   2> 2   3> 2   4> 2   1> 2   2> 3 ...
     */
    public static class MyMapFunction implements MapFunction<People, Integer>, ListCheckpointed<Integer> {
        // operator state: per-subtask element counter
        private Integer count = 0;

        @Override
        public Integer map(People value) throws Exception {
            count = count + 1;
            return count;
        }

        @Override
        public List<Integer> snapshotState(long checkpointId, long timestamp) throws Exception {
            // the whole state of this subtask is the single counter value
            return Collections.singletonList(count);
        }

        @Override
        public void restoreState(List<Integer> state) throws Exception {
            // a restored subtask may receive several partial counts; sum them
            for (Integer partial : state) {
                count += partial;
            }
        }
    }


    /**
     * Keyed-state demo: counts elements per key. Keyed state is scoped to the
     * current key, so every keyBy group gets its own counter. With multiple
     * subtasks, records for the same key always reach the same subtask
     * (hash-partitioned).
     *
     * Flink keyed-state flavors: value-state, list-state, map-state,
     * reducing-state.
     */
    public static class MyKeyControllerMapFunction extends RichMapFunction<People, Integer> {
        private ValueState<Integer> count;
        // NOTE(review): registered but never read or written below — appears to
        // exist only to show how a ListState handle is obtained
        private ListState<String> listState;

        @Override
        public void open(Configuration parameters) throws Exception {
            count = getRuntimeContext().getState(new ValueStateDescriptor<Integer>("key-count", Integer.class));
            listState = getRuntimeContext().getListState(new ListStateDescriptor<String>("list-count", String.class));
        }

        @Override
        public Integer map(People value) throws Exception {
            // state is null the first time a key is seen
            Integer current = count.value();
            int next = (current == null ? 0 : current) + 1;
            count.update(next);
            return next;
        }

    }

    /**
     * Emits an alert when, within the same sex group, two consecutive
     * people's ages differ by more than 10.
     * Output tuple: (previous age, current age, "报警" alert / "正常" normal).
     */
    public static class MyBusinessMapFunction extends RichMapFunction<People, Tuple3<Integer, Integer, String>> {
        // age of the previous person seen for the current key
        private ValueState<Integer> preAge;

        @Override
        public void open(Configuration parameters) throws Exception {
            preAge = getRuntimeContext().getState(new ValueStateDescriptor<Integer>("pre-age", Integer.class));
        }

        @Override
        public Tuple3<Integer, Integer, String> map(People value) throws Exception {
            Integer previous = preAge.value();
            preAge.update(value.getAge());
            // first record for this key: nothing to compare against yet
            if (previous == null) {
                return new Tuple3<>(0, value.getAge(), "正常");
            }
            String status = Math.abs(previous - value.getAge()) > 10 ? "报警" : "正常";
            return new Tuple3<>(previous, value.getAge(), status);
        }
    }

    /**
     * FlatMap variant of the age-jump alert: emits ONLY alert tuples
     * (previous age, current age, "报警"); normal records produce no output.
     *
     * The age-difference threshold is configurable via the constructor.
     * Bug fixed: the original compared against a hard-coded 10 and silently
     * ignored the constructor-supplied field.
     */
    public static class MyBusinessFlatMapFunction extends RichFlatMapFunction<People, Tuple3<Integer, Integer, String>> {
        // alert threshold: maximum allowed age difference between consecutive people
        private Integer age;
        // age of the previous person seen for the current key
        private ValueState<Integer> preAge;

        public MyBusinessFlatMapFunction(Integer age) {
            this.age = age;
        }

        @Override
        public void open(Configuration parameters) throws Exception {
            preAge = getRuntimeContext().getState(new ValueStateDescriptor<Integer>("pre-age", Integer.class));
        }

        @Override
        public void flatMap(People value, Collector<Tuple3<Integer, Integer, String>> out) throws Exception {
            Integer preAgeValue = preAge.value();
            preAge.update(value.getAge());
            // first record for this key: nothing to compare against
            if (preAgeValue == null) {
                return;
            }
            // use the configured threshold instead of the hard-coded 10
            if (Math.abs(preAgeValue - value.getAge()) > age) {
                out.collect(new Tuple3<>(preAgeValue, value.getAge(), "报警"));
            }
        }
    }

    // Alerts when the age keeps rising for initGap (here 10) seconds per key.
    //
    // processElement registers a processing-time timer the first time the age
    // rises; if the age falls before the timer fires, the timer is deleted.
    // If the timer survives the whole gap, onTimer emits the alert.
    public static class MyProcessFunction extends KeyedProcessFunction<Tuple, People, String> {

        // age of the previous element for the current key
        private ValueState<Integer> preAge;

        // timestamp of the currently registered timer (null when none pending)
        private ValueState<Long> preTimer;

        // alert gap in seconds (multiplied by 1000 below for milliseconds)
        private Long initGap;

        public MyProcessFunction(Long initGap) {
            this.initGap = initGap;
        }

        @Override
        public void open(Configuration parameters) throws Exception {
            preAge = getRuntimeContext().getState(new ValueStateDescriptor<Integer>("pre-age", Integer.class));
            preTimer = getRuntimeContext().getState(new ValueStateDescriptor<Long>("pre-timer", Long.class));
        }

        @Override
        public void onTimer(long timestamp, OnTimerContext ctx, Collector<String> out) throws Exception {
            // the timer survived the whole gap: the age rose continuously for 10 s
            out.collect("性别：" + ctx.getCurrentKey().getField(0) + "  10s连续上升");
            preTimer.clear();
        }


        @Override
        public void processElement(People value, Context ctx, Collector<String> out) throws Exception {
            Integer preAgeValue = preAge.value();
            Long preTimerValue = preTimer.value();
            preAge.update(value.getAge());
            // first element for this key: treat the previous age as 0
            if (preAgeValue == null) {
                preAgeValue = 0;
            }
            // age rising and no timer pending: start one initGap seconds from now
            if (value.getAge() > preAgeValue && preTimerValue == null) {
                long l = ctx.timerService().currentProcessingTime() + initGap * 1000;
                ctx.timerService().registerProcessingTimeTimer(l);
                preTimer.update(l);
                return;
            }
            // age falling with a timer pending: the rising streak is broken
            if (value.getAge() < preAgeValue && preTimerValue != null) {
                ctx.timerService().deleteProcessingTimeTimer(preTimerValue); // cancel the timer
                preTimer.clear(); // clear the timer state
            }
        }

        @Override
        public void close() throws Exception {

        }
    }

}
