package com.zhang.first.day08;

import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.runtime.state.filesystem.FsStateBackend;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.KeyedProcessFunction;
import org.apache.flink.util.Collector;

/**
 * Stateful streaming word count with HDFS-backed checkpointing.
 *
 * <p>Reads lines from a socket (hadoop302:9999), splits them into words, and
 * keeps a per-word running count in keyed {@link ValueState} so the counts
 * survive failures and restarts via exactly-once checkpoints written to HDFS.
 *
 * @author zhang
 * @date 2022/1/22 22:30
 */
public class Example2 {
    public static void main(String[] args) throws Exception {
        // Must be set before any HDFS interaction so the Hadoop client
        // authenticates checkpoint writes as this user.
        System.setProperty("HADOOP_USER_NAME", "zhang");

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // Enable checkpointing with a filesystem state backend
        // (alternatives: memory / fs / rocksdb), storing checkpoints in HDFS.
        env.setStateBackend(new FsStateBackend("hdfs://hadoop302:8020/flink/ck"));
        env.enableCheckpointing(5000L);
        env.getCheckpointConfig().setCheckpointingMode(CheckpointingMode.EXACTLY_ONCE);
        env.getCheckpointConfig().setCheckpointTimeout(10000L);
        env.getCheckpointConfig().setMaxConcurrentCheckpoints(2);
        env.getCheckpointConfig().setMinPauseBetweenCheckpoints(3000);

        env
                .socketTextStream("hadoop302", 9999)
                .flatMap(new FlatMapFunction<String, Tuple2<String, Integer>>() {
                    @Override
                    public void flatMap(String value, Collector<Tuple2<String, Integer>> out) throws Exception {
                        // Emit (word, 1) per token; skip empty tokens that
                        // split(" ") produces for blank lines or repeated spaces,
                        // which would otherwise be counted as the "" word.
                        for (String field : value.split(" ")) {
                            if (!field.isEmpty()) {
                                out.collect(Tuple2.of(field, 1));
                            }
                        }
                    }
                })
                .keyBy(r -> r.f0)
                .process(new KeyedProcessFunction<String, Tuple2<String, Integer>, Tuple2<String, Integer>>() {
                    // Per-key running (word, count) accumulator; included in checkpoints.
                    private ValueState<Tuple2<String, Integer>> valueState;

                    @Override
                    public void open(Configuration parameters) throws Exception {
                        valueState = getRuntimeContext().getState(
                                new ValueStateDescriptor<Tuple2<String, Integer>>(
                                        "acc",
                                        Types.TUPLE(Types.STRING, Types.INT)
                                )
                        );
                    }

                    @Override
                    public void processElement(Tuple2<String, Integer> value, KeyedProcessFunction<String, Tuple2<String, Integer>, Tuple2<String, Integer>>.Context ctx, Collector<Tuple2<String, Integer>> out) throws Exception {
                        // First element for this key seeds the state; afterwards
                        // accumulate and emit the updated running total.
                        Tuple2<String, Integer> current = valueState.value();
                        if (current == null) {
                            out.collect(value);
                            valueState.update(value);
                        } else {
                            int sum = current.f1 + value.f1;
                            out.collect(Tuple2.of(value.f0, sum));
                            valueState.update(Tuple2.of(value.f0, sum));
                        }
                    }
                })
                .print();

        env.execute();
    }
}
