package state.demo1;


import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.state.MapState;
import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.api.common.state.StateTtlConfig;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.api.common.typeinfo.BasicTypeInfo;
import org.apache.flink.api.java.tuple.Tuple;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.contrib.streaming.state.EmbeddedRocksDBStateBackend;
import org.apache.flink.runtime.state.hashmap.HashMapStateBackend;
import org.apache.flink.runtime.state.storage.FileSystemCheckpointStorage;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.KeyedProcessFunction;
import org.apache.flink.util.Collector;

/**
 * Local Flink demo: reads strings from {@link TestSource}, maps each to
 * (value, 1), keys by the string, and lets {@link MyKeyedProcessFunction}
 * maintain a per-key occurrence counter in keyed MapState with a TTL.
 */
public class Main {

  public static void main(String[] args) throws Exception {

    Configuration flinkConf = new Configuration();
    flinkConf.setString("rest.port", "9091");
    // Pin the job id so the local web UI / externalized checkpoints stay
    // stable across restarts of this demo.
    flinkConf.setString("$internal.pipeline.job-id", "c0e67372c9136321a83c49257cf79999");
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(flinkConf);
    env.setParallelism(1);

    // Disable chaining so each operator shows up separately in the UI (demo aid).
    env.disableOperatorChaining();
    env.enableCheckpointing(2000);

    // NOTE(review): HashMapStateBackend is active here. The RocksDB-specific
    // TTL compaction-filter cleanup configured in MyKeyedProcessFunction only
    // takes effect with EmbeddedRocksDBStateBackend — switch backends to use it.
    env.setStateBackend(new HashMapStateBackend());

    CheckpointConfig checkpointConfig = env.getCheckpointConfig();
    checkpointConfig.setCheckpointStorage(new FileSystemCheckpointStorage("file:///flink_ckp"));
//    checkpointConfig.setCheckpointStorage(new FileSystemCheckpointStorage("hdfs:///user/flink/calc_primary/flink-checkpoints"));
    // Keep externalized checkpoints when the job is cancelled so it can be
    // resumed manually from the retained checkpoint.
    checkpointConfig.setExternalizedCheckpointCleanup(
        CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);

    DataStreamSource<String> ds = env.addSource(new TestSource());

    // Anonymous MapFunction (not a lambda) so Flink can extract the
    // Tuple2<String, Integer> output type without a hint.
    ds.map(new MapFunction<String, Tuple2<String, Integer>>() {
          @Override
          public Tuple2<String, Integer> map(String value) throws Exception {
            return new Tuple2<>(value, 1);
          }
        })
        .keyBy(0)
        .process(new MyKeyedProcessFunction())
        .print();

    env.execute("app");
  }
}


/**
 * Keyed process function that tracks, per key, how many times each element
 * value has been seen before, in TTL-protected MapState. Elements are passed
 * through to the output unchanged.
 */
class MyKeyedProcessFunction extends KeyedProcessFunction<Tuple, Tuple2<String, Integer>, Tuple2<String, Integer>> {

  // Map-typed keyed state descriptor (other flavors: ValueState, ListState, ...).
  private final MapStateDescriptor<String, Long> mapStateDesc = new MapStateDescriptor<>(
          "itemsMap",
          BasicTypeInfo.STRING_TYPE_INFO,
          BasicTypeInfo.LONG_TYPE_INFO);

  // Cached state handle, acquired once in open() instead of on every element.
  private transient MapState<String, Long> mapState;

  @Override
  public void open(Configuration parameters) throws Exception {

    // Attach a TTL to the state: entries expire 1 hour after creation or last
    // write, and expired values are never returned to the caller.
    StateTtlConfig stateTtlConfig = StateTtlConfig
        .newBuilder(Time.hours(1))
        .setUpdateType(StateTtlConfig.UpdateType.OnCreateAndWrite)
        .setStateVisibility(StateTtlConfig.StateVisibility.NeverReturnExpired)
        // NOTE(review): compaction-filter cleanup only works with the RocksDB
        // state backend; Main currently installs HashMapStateBackend, so this
        // cleanup strategy is inert there — confirm which backend is intended.
        .cleanupInRocksdbCompactFilter(100)
        .build();

    // TTL must be enabled on the descriptor BEFORE the state is first acquired.
    mapStateDesc.enableTimeToLive(stateTtlConfig);
    mapState = getRuntimeContext().getMapState(mapStateDesc);
  }

  @Override
  public void processElement(
      Tuple2<String, Integer> value,
      KeyedProcessFunction<Tuple, Tuple2<String, Integer>, Tuple2<String, Integer>>.Context ctx,
      Collector<Tuple2<String, Integer>> out) throws Exception {

    // Per-key occurrence counter: the first sighting stores 0, each subsequent
    // sighting increments — i.e. the stored value is the number of PRIOR
    // occurrences of value.f0 under the current key.
    Long prior = mapState.get(value.f0);
    mapState.put(value.f0, prior == null ? 0L : prior + 1);

    // Pass the element through unchanged.
    out.collect(value);
  }
}
