package cn.gwm.flink.streaming.task;

import cn.gwm.flink.streaming.beans.estimateSoh.ConfigData;
import cn.gwm.flink.streaming.beans.estimateSoh.EstimateSohRecord;
import cn.gwm.flink.streaming.constant.DefaultConstant;
import cn.gwm.flink.streaming.function.process.EstimateSohOutProcessFunction;
import cn.gwm.flink.streaming.function.process.EstimateSohPrehandleMapFunction;
import cn.gwm.flink.streaming.function.process.EstimateSohProcessFunction;
import cn.gwm.flink.streaming.function.process.EstmateSohBroadcastProcessFunction;
import cn.gwm.flink.streaming.function.reduce.EstimateSohCountReduceEvFunction;
import cn.gwm.flink.streaming.function.source.EstimateSohBroadcastSourceFunction;
import cn.gwm.utils.ConfigLoader;
import cn.hutool.core.util.BooleanUtil;
import org.apache.commons.lang3.StringUtils;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.contrib.streaming.state.EmbeddedRocksDBStateBackend;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.BroadcastStream;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.sink.filesystem.StreamingFileSink;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.util.OutputTag;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.time.Duration;
import java.util.Map;
import java.util.Objects;

/**
 * soh估算任务
 *
 * @author GW00283474
 */
public class EstimateSohEvDwmTask extends BaseTask {
    private static final Logger logger = LoggerFactory.getLogger(EstimateSohEvDwmTask.class);

    /**
     * Broadcast-state descriptor carrying config entries ({@link ConfigData}) to every
     * parallel instance of the keyed connect operator below.
     */
    static final MapStateDescriptor<String, ConfigData> CONFIG_DATA_STATE =
            new MapStateDescriptor<>("configDataDescEv", Types.STRING, Types.POJO(ConfigData.class));

    /**
     * Entry point: initializes configuration from the command line, resolves the active
     * environment profile (defaulting to {@code dev}), and runs the job.
     *
     * @param args command-line arguments forwarded to {@link ConfigLoader#init}
     */
    public static void main(String[] args) {
        try {
            ConfigLoader.init(args);
            String startEnv = ConfigLoader.get(DefaultConstant.BOOT_PROFILES_ACTIVE_PREFIX, "dev");
            execute(startEnv, args);
        } catch (Exception e) {
            // Pass the throwable as the final SLF4J argument so the full stack trace is
            // logged. The previous code logged only e.getMessage() (which may be null,
            // e.g. for NPEs) and relied on printStackTrace(), which bypasses the logger.
            logger.error("{}任务异常", "estimateSohTaskEv", e);
        }
    }

    /**
     * Builds and runs the SOH (state-of-health) estimation pipeline for pure-electric
     * vehicles: Kafka source → pre-handle/filter → broadcast-config connect →
     * watermarked keyed process → count window reduce → side-output split →
     * Kafka sink (main result) + HDFS/Hive sink (side output).
     *
     * @param startEnv active environment profile (e.g. {@code dev}), used by the
     *                 broadcast config source to load the right settings
     * @param args     raw command-line arguments (currently unused here; kept for
     *                 interface stability with callers)
     * @throws Exception if the Flink job fails to build or execute
     */
    static void execute(String startEnv, String[] args) throws Exception {
        String jobName = "estimateSohTaskEv";
        // Parallelism multiplier applied to most downstream operators.
        int extend = 2;
        StreamExecutionEnvironment env = getEnv(jobName);
        env.enableCheckpointing(3 * 60 * 1000L, CheckpointingMode.EXACTLY_ONCE);
        // If barrier alignment for a subtask exceeds this timeout, the checkpoint
        // proceeds as an unaligned checkpoint instead of blocking.
        env.getCheckpointConfig().setAlignmentTimeout(Duration.ofMillis(5 * 60 * 1000L));
        env.getCheckpointConfig().setCheckpointTimeout(6 * 60 * 1000L);
        env.setParallelism(1);
        env.getCheckpointConfig().setMinPauseBetweenCheckpoints(500L);
        // RocksDB state backend with incremental checkpoints enabled.
        env.setStateBackend(new EmbeddedRocksDBStateBackend(true));
        env.setRestartStrategy(RestartStrategies.failureRateRestart(10, Time.minutes(15), Time.seconds(10)));

        // Source: cleaned DWD records for pure-electric (EV) vehicles.
        String evTopic = ConfigLoader.get("kafka.clean.topic.dwd.ev");
        FlinkKafkaConsumer<String> kafkaConsumer = decorateKafkaConsumer("estimate-soh-ev-dwm", evTopic);
        SingleOutputStreamOperator<EstimateSohRecord> strToBean = env.addSource(kafkaConsumer).uid("strToBeanSource-uid")
                .map(new EstimateSohPrehandleMapFunction())
                .uid("strToBean-uid")
                // Keep only records with a known (non-excluded) battery type and all the
                // signals the estimator needs (cell voltage max/min, VCU status).
                .filter(record -> Objects.nonNull(record.getCell_battery_type())
                        && (!DefaultConstant.Cell_Battery_Type.equals(record.getCell_battery_type()))
                        && Objects.nonNull(record.getBMS_RMC_CellVoltMax())
                        && Objects.nonNull(record.getBMS_RMC_CellVoltMin())
                        && Objects.nonNull(record.getVCU_VCUSts())
                        && !StringUtils.isEmpty(record.getCell_battery_type())).uid("strToBean-uid2")
                .returns(Types.GENERIC(EstimateSohRecord.class));

        // Broadcast stream of configuration entries, refreshed by the source function.
        BroadcastStream<Map<String, ConfigData>> broadcast = env.addSource(new EstimateSohBroadcastSourceFunction(startEnv)).uid("broadcast-uid").broadcast(CONFIG_DATA_STATE);
        DataStream<EstimateSohRecord> recordsAfterBroadcast = strToBean.keyBy(EstimateSohRecord::getDeviceId).connect(broadcast)
                .process(new EstmateSohBroadcastProcessFunction(CONFIG_DATA_STATE)).setParallelism(env.getParallelism() * extend).uid("recordsAfterBroadcast-uid");

        // Event-time watermarks from item_time with zero out-of-orderness tolerance,
        // then per-device SOH processing followed by a sliding count window of 2
        // (slide 1) reduced pairwise.
        SingleOutputStreamOperator<EstimateSohRecord> countProcess = recordsAfterBroadcast.assignTimestampsAndWatermarks(WatermarkStrategy.<EstimateSohRecord>forBoundedOutOfOrderness(Duration.ZERO)
                        .withTimestampAssigner(((element, recordTimestamp) -> element.getItem_time()))).setParallelism(env.getParallelism() * extend)
                .keyBy(EstimateSohRecord::getDeviceId)
                .process(new EstimateSohProcessFunction()).setParallelism(env.getParallelism() * extend * 6).uid("countProcess-uid1")
                .filter(record -> BooleanUtil.isTrue(record.getEnableFlag())).uid("countProcess-uid3").setParallelism(env.getParallelism() * extend)
                .keyBy(EstimateSohRecord::getDeviceId)
                .countWindow(2L, 1L)
                .reduce(new EstimateSohCountReduceEvFunction()).uid("countProcess-uid2").setParallelism(env.getParallelism() * extend);

        // Split the output: main stream goes to Kafka, side output (tagged below)
        // goes to the Hive-backed HDFS sink.
        OutputTag<String> hiveOutputTag = new OutputTag<String>("estimateSohOutputTagEv") {
        };
        SingleOutputStreamOperator<String> resultStream = countProcess.keyBy(EstimateSohRecord::getDeviceId).process(new EstimateSohOutProcessFunction(hiveOutputTag));
        DataStream<String> sideOutput = resultStream.getSideOutput(hiveOutputTag);

        // Sink 1: main results to Kafka.
        String topic = ConfigLoader.get("kafka.estimateSoh.ev.topic");
        defaultSinkToKafka(resultStream, topic, "600000", env.getParallelism() * extend);

        // Sink 2: side output to HDFS files consumed by the Hive table dwm/estimate_soh_ev.
        StreamingFileSink<String> hdfsSink = hdfsSink("estimate_soh_ev", ".txt", "dwm/estimate_soh_ev");
        sideOutput.addSink(hdfsSink).uid("hdfsSink-uid").name("sohEvHiveSink").setParallelism(env.getParallelism() * extend);

        env.execute(jobName);
    }
}
