package com.tmsb.es;

import com.alibaba.fastjson.JSON;
import com.tmsb.pojo.ESScore;
import com.tmsb.pojo.MysqlBinlog;
import com.tmsb.pojo.UcenterUser;
import com.tmsb.sink.DauKeyStateSink;
import com.tmsb.sink.ScoreSink;
import com.tmsb.utils.*;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.common.state.MapState;
import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.common.typeinfo.BasicTypeInfo;
import org.apache.flink.api.common.typeinfo.TypeHint;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.contrib.streaming.state.PredefinedOptions;
import org.apache.flink.contrib.streaming.state.RocksDBStateBackend;
import org.apache.flink.runtime.state.filesystem.FsStateBackend;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.util.Collector;
import redis.clients.jedis.JedisCluster;

import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Properties;
import java.util.concurrent.TimeUnit;

/**
 * Description: derives two real-time per-province metrics from the ES score table.
 * NOTE(review): the original comment listed "日贡献值" (daily contribution value) twice —
 * the name of the second metric needs confirming with the author.
 * Version:1.0.0
 *
 * @author xiekb
 * @date 2020/7/3
 */
public class ScoreAnalysis {

    /**
     * Entry point: builds and runs the Flink streaming job.
     *
     * <p>Pipeline: Kafka topic (raw JSON strings) → parse into {@link ESScore} POJOs
     * → {@link ScoreSink}. All tunables (checkpoint URI/interval, parallelism,
     * restart-strategy rates, Kafka topic/properties) come from {@code ConfUtil}.
     *
     * @param args command-line arguments (unused)
     * @throws Exception if the job graph cannot be built or execution fails
     */
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Checkpoint state lives on a filesystem; the URI is externally configured.
        env.setStateBackend(new FsStateBackend(ConfUtil.getScoreAnalysisCheckpointUri()));

        // Retain externalized checkpoints on cancellation so the job can be
        // resumed manually from the last checkpoint.
        env.getCheckpointConfig().enableExternalizedCheckpoints(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);
        env.getCheckpointConfig().setMaxConcurrentCheckpoints(1);
        env.enableCheckpointing(ConfUtil.getCheckpointInterval(), CheckpointingMode.EXACTLY_ONCE);
        env.getCheckpointConfig().setMinPauseBetweenCheckpoints(ConfUtil.getCheckpointMinPauseBetween());
        env.setParallelism(ConfUtil.getParallelism());

        // Restart strategy: allow at most getfailureRate() failures within a window of
        // getfailureInterval() minutes, waiting getdelayInterval() seconds between attempts.
        env.setRestartStrategy(RestartStrategies.failureRateRestart(
                ConfUtil.getfailureRate(),
                org.apache.flink.api.common.time.Time.of(ConfUtil.getfailureInterval(), TimeUnit.MINUTES),
                org.apache.flink.api.common.time.Time.of(ConfUtil.getdelayInterval(), TimeUnit.SECONDS)
        ));

        // Kafka source: consumes raw JSON strings from the score topic.
        DataStream<String> source = env
                .addSource(new FlinkKafkaConsumer<>(ConfUtil.getTopicOfScore(), new SimpleStringSchema(), ConfUtil.getKafkaProperties()))
                .uid("kafka-source");

        // Deserialize each record into an ESScore POJO. The explicit .returns(...)
        // is required because lambda type information is erased at runtime.
        SingleOutputStreamOperator<ESScore> esScore = source
                .map(s -> JSON.parseObject(s, ESScore.class))
                .uid("map-parser-pojo")
                .returns(ESScore.class);

        // Emit parsed scores to the custom sink.
        esScore.addSink(new ScoreSink())
                .uid("score-sink");

        env.execute("ScoreAnalysis");
    }
}
