package cn.doitedu.flink.state;

import cn.doitedu.flink.sources.RandomString;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.api.common.state.StateTtlConfig;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.api.common.typeinfo.TypeHint;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.utils.ParameterTool;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.runtime.state.StateBackend;
import org.apache.flink.runtime.state.filesystem.FsStateBackend;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import scala.Int;

import java.net.URI;

/**
 * Keyed-state demo job: consumes "field0,field1" strings from {@code RandomString},
 * keys the stream by the first field, and maintains a per-key running count in
 * {@code ValueState} with a 5-minute TTL. Checkpoints exactly-once every 5 seconds
 * to a local filesystem state backend.
 */
public class Demo {
    public static void main(String[] args) throws Exception {
        ParameterTool params = ParameterTool.fromArgs(args);
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Expose CLI parameters to all operators via the job configuration.
        env.getConfig().setGlobalJobParameters(params);
        // Checkpoint every 5s with exactly-once semantics so keyed state survives failures.
        env.enableCheckpointing(5000, CheckpointingMode.EXACTLY_ONCE);
        StateBackend fsStateBackend = new FsStateBackend(new URI("file:///f:/flinkcheck"));
        env.setStateBackend(fsStateBackend);

        DataStreamSource<String> sds = env.addSource(new RandomString());

        SingleOutputStreamOperator<Tuple2<String, Integer>> counts = sds
                .setParallelism(1)
                .map(new MapFunction<String, Tuple2<String, String>>() {
                    @Override
                    public Tuple2<String, String> map(String value) throws Exception {
                        // Expects "field0,field1". A record with fewer than two fields
                        // throws ArrayIndexOutOfBoundsException here and fails the job.
                        String[] split = value.split(",");
                        return Tuple2.of(split[0], split[1]);
                    }
                })
                .keyBy(0) // key by the first CSV field
                .map(new RichMapFunction<Tuple2<String, String>, Tuple2<String, Integer>>() {
                    /** Per-key (label, count) pair; entries expire 5 minutes after last write. */
                    ValueState<Tuple2<String, Integer>> state;

                    @Override
                    public void open(Configuration parameters) throws Exception {
                        super.open(parameters);
                        // Default TTL semantics: OnCreateAndWrite update, NeverReturnExpired.
                        StateTtlConfig ttlConfig = StateTtlConfig.newBuilder(Time.minutes(5)).build();

                        // Parameterized descriptor (was raw) so the state handle is
                        // type-checked and no unchecked-assignment warning is produced.
                        ValueStateDescriptor<Tuple2<String, Integer>> des =
                                new ValueStateDescriptor<>(
                                        "des",
                                        TypeInformation.of(new TypeHint<Tuple2<String, Integer>>() {}));
                        des.enableTimeToLive(ttlConfig);
                        state = getRuntimeContext().getState(des);
                    }

                    @Override
                    public Tuple2<String, Integer> map(Tuple2<String, String> value) throws Exception {
                        Tuple2<String, Integer> curState = state.value();
                        if (curState == null) {
                            // NOTE(review): the stored label is value.f1 — the second CSV field
                            // of the FIRST record seen for this key — not the key itself
                            // (value.f0). If the intent is a per-key count labeled by the key,
                            // this should probably be value.f0; kept as-is to preserve behavior.
                            curState = new Tuple2<>(value.f1, 0);
                        }
                        Tuple2<String, Integer> newState = new Tuple2<>(curState.f0, curState.f1 + 1);
                        state.update(newState);
                        return newState;
                    }
                });

        counts.print();
        env.execute("job");
    }
}
