package com.tmsb.es;

import com.alibaba.fastjson.JSON;
import com.tmsb.pojo.ESStockAlarm;
import com.tmsb.utils.AlarmProcessWindowFunction;
import com.tmsb.utils.ConfUtil;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.contrib.streaming.state.PredefinedOptions;
import org.apache.flink.contrib.streaming.state.RocksDBStateBackend;
import org.apache.flink.runtime.state.filesystem.FsStateBackend;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.TimeCharacteristic;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.timestamps.BoundedOutOfOrdernessTimestampExtractor;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;

import java.io.IOException;
import java.util.concurrent.TimeUnit;

/**
 * @Author wuxiangwu
 * @Date 2020/8/12 10:29
 * @Description Flink streaming job that reads stock-pension events from Kafka,
 *              applies windowed alarm processing, and writes alarms back to Kafka.
 * @Version 1.0
 */
public class StockPensionAnalysis {

    /**
     * Entry point for the stock-pension alarm streaming job.
     *
     * <p>Pipeline: Kafka source (pension topic) -> JSON deserialization into
     * {@link ESStockAlarm} -> event-time watermarking (30s out-of-orderness)
     * -> keyed 10s tumbling windows -> {@link AlarmProcessWindowFunction}
     * -> Kafka sink (topic "TEST").
     *
     * @param args unused command-line arguments
     * @throws Exception if the state backend cannot be created or the Flink
     *                   job fails to submit/execute; propagated so the launcher
     *                   (e.g. YARN/standalone client) sees a non-zero exit
     *                   instead of a silently "successful" run
     */
    public static void main(String[] args) throws Exception {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // RocksDB state backend with incremental checkpoints enabled.
        // Propagate the IOException rather than catch-and-print: the original
        // code swallowed it and then dereferenced the still-null backend,
        // producing an NPE that masked the real configuration failure.
        RocksDBStateBackend rocksDBStateBackend =
                new RocksDBStateBackend(ConfUtil.getStockPenAlarmCheckpointUri(), true);
        rocksDBStateBackend.setPredefinedOptions(PredefinedOptions.SPINNING_DISK_OPTIMIZED_HIGH_MEM);
        env.setStateBackend(rocksDBStateBackend);

        // Event-time processing with exactly-once checkpoints retained on cancel,
        // so the job can be resumed from an externalized checkpoint.
        env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);
        env.getCheckpointConfig().enableExternalizedCheckpoints(
                CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);
        env.getCheckpointConfig().setMaxConcurrentCheckpoints(1);
        env.enableCheckpointing(ConfUtil.getCheckpointInterval(), CheckpointingMode.EXACTLY_ONCE);
        env.getCheckpointConfig().setMinPauseBetweenCheckpoints(ConfUtil.getCheckpointMinPauseBetween());
        // NOTE(review): parallelism is hard-coded; ConfUtil.getParallelism() was
        // previously used — confirm 6 is the intended production value.
        env.setParallelism(6);

        // Restart strategy: failure-rate based, parameters from configuration.
        env.setRestartStrategy(RestartStrategies.failureRateRestart(
                ConfUtil.getfailureRate(),
                org.apache.flink.api.common.time.Time.of(ConfUtil.getfailureInterval(), TimeUnit.MINUTES),
                org.apache.flink.api.common.time.Time.of(ConfUtil.getdelayInterval(), TimeUnit.SECONDS)
        ));

        // Raw stock-pension event stream from Kafka.
        DataStream<String> streamSource = env
                .addSource(new FlinkKafkaConsumer<>(
                        ConfUtil.getTopicOfPension(),
                        new SimpleStringSchema(),
                        ConfUtil.getKafkaProperties()))
                .uid("pension-source");

        // Deserialize each JSON record into an ESStockAlarm and tag it with
        // the source topic name.
        SingleOutputStreamOperator<ESStockAlarm> pensionStream = streamSource
                .map(s -> {
                    ESStockAlarm esStockAlarm = JSON.parseObject(s, ESStockAlarm.class);
                    esStockAlarm.setName(ConfUtil.getTopicOfPension());
                    return esStockAlarm;
                }).uid("pension-map-alarm");

        // Alarm processing: watermark on created_time (seconds -> millis, 30s
        // allowed lateness), key by user and topic name, 10s tumbling windows,
        // then sink the produced alarm strings to Kafka.
        pensionStream.assignTimestampsAndWatermarks(
                new BoundedOutOfOrdernessTimestampExtractor<ESStockAlarm>(Time.seconds(30)) {
                    @Override
                    public long extractTimestamp(ESStockAlarm e) {
                        // created_time is in epoch seconds; Flink expects millis.
                        return e.getCreated_time() * 1000;
                    }
                }
        ).uid("pension-time-alarm")
                .keyBy("user_id", "name")
                .timeWindow(Time.seconds(10))
                .process(new AlarmProcessWindowFunction())
                .uid("pension-process-alarm")
                .addSink(new FlinkKafkaProducer<String>(
                        ConfUtil.getBootstrapServers(),
                        "TEST",
                        new SimpleStringSchema()))
                .setParallelism(1)
                .uid("pension-sink-alarm");

        // Execute the job; failures propagate instead of being swallowed by
        // printStackTrace so the submitting client reports them.
        env.execute("stockPension");
    }
}
