package cn.gwm.flink.streaming.task;

import cn.gwm.flink.streaming.beans.SourceHbaseBean;
import cn.gwm.flink.streaming.beans.termalRunaway.WaringTermalDwmWithStragegyPhev;
import cn.gwm.flink.streaming.beans.termalRunaway.WaringTermalWithStragegyAndFlagPhev;
import cn.gwm.flink.streaming.constant.DefaultConstant;
import cn.gwm.flink.streaming.constant.TermalProcessResult;
import cn.gwm.flink.streaming.constant.VehiclePowerType;
import cn.gwm.flink.streaming.function.map.TermalWaringNullPhevMapFunction;
import cn.gwm.flink.streaming.function.process.*;
import cn.gwm.flink.streaming.function.source.TermalBroadcastSourceFunction;
import cn.gwm.flink.streaming.sink.hbase.HbaseSinkProducer;
import cn.gwm.utils.ConfigLoader;
import cn.hutool.core.date.DatePattern;
import cn.hutool.core.date.DateUtil;
import cn.hutool.core.lang.Dict;
import cn.hutool.json.JSONUtil;
import com.alibaba.fastjson.JSONObject;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.api.common.typeinfo.BasicTypeInfo;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.contrib.streaming.state.EmbeddedRocksDBStateBackend;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.BroadcastStream;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.util.OutputTag;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.time.Duration;
import java.util.Date;
import java.util.List;
import java.util.Map;


/**
 * @author GW00283474
 */
public class ThermalWarningPhevDwmTaskTest extends BaseTask {
    private static final Logger logger = LoggerFactory.getLogger(ThermalWarningPhevDwmTaskTest.class);

    /** Broadcast-state descriptor holding the PHEV alert-rule configuration loaded from MySQL. */
    static final MapStateDescriptor<String, List> PHEV_MAP_STATE_DESCRIPTOR =
            new MapStateDescriptor<>("PhevAlertRuleConfig", BasicTypeInfo.STRING_TYPE_INFO, TypeInformation.of(List.class));

    /**
     * Entry point: configures checkpointing, state backend and restart strategy,
     * then builds and submits the PHEV thermal-runaway (热失控) test pipeline.
     *
     * @param args program arguments forwarded to {@link ConfigLoader#init}
     */
    public static void main(String[] args) {
        try {
            ConfigLoader.init(args);
            String startEnv = ConfigLoader.get(DefaultConstant.BOOT_PROFILES_ACTIVE_PREFIX, "dev");
            String jobName = "thermalWarningPhevDwmTaskTest";
            StreamExecutionEnvironment env = getEnv(jobName);
            // Exactly-once checkpoints every 10 minutes.
            env.enableCheckpointing(10 * 60 * 1000L, CheckpointingMode.EXACTLY_ONCE);
            // If barrier alignment in a subtask exceeds this timeout, the checkpoint
            // continues as an unaligned checkpoint.
            env.getCheckpointConfig().setAlignmentTimeout(Duration.ofMillis(12 * 60 * 1000L));
            env.getCheckpointConfig().setCheckpointTimeout(16 * 60 * 1000L);
            env.getCheckpointConfig().setMinPauseBetweenCheckpoints(500L);
            // true => incremental RocksDB checkpoints.
            env.setStateBackend(new EmbeddedRocksDBStateBackend(true));
            env.setRestartStrategy(RestartStrategies.failureRateRestart(10, Time.minutes(15), Time.seconds(10)));
            int parallelism = 1;
            int extend = 3;
            env.setParallelism(parallelism);
            phevExecute(env, parallelism, extend, startEnv, args);
            env.execute(jobName);
        } catch (Exception e) {
            // Log through SLF4J (with the cause) instead of printStackTrace so the
            // failure reason actually reaches the job logs / log collector.
            logger.error("thermalWarningPhevDwmTaskTest job failed", e);
        }
    }

    /**
     * Builds the PHEV (plug-in hybrid) thermal-runaway detection pipeline:
     * Kafka source → parse/clean → broadcast-rule join → temperature/voltage checks
     * → rise-rate check → vehicle-event fusion → result aggregation, with sinks to
     * Kafka, HBase and the log collector (Kibana).
     *
     * @param env         the stream execution environment
     * @param parallelism base job parallelism
     * @param extend      multiplier applied to the heavy processing operators
     * @param startEnv    active profile (e.g. dev/test/prod) used by the broadcast rule source
     * @param args        original program arguments (unused here; kept for signature compatibility)
     * @throws Exception if source/sink construction fails
     */
    static void phevExecute(StreamExecutionEnvironment env, int parallelism, int extend, String startEnv, String[] args) throws Exception {
        // MySQL rule broadcast: alert-rule configuration distributed to all subtasks.
        BroadcastStream<Map<String, List>> broadcastStream = env
                .addSource(new TermalBroadcastSourceFunction(startEnv))
                .uid("broadcastStream-uid")
                .broadcast(PHEV_MAP_STATE_DESCRIPTOR);
        FlinkKafkaConsumer<String> kafkaConsumer = decorateKafkaConsumer("thermalWarning-phev-dwm-test", "dwd_clean_phev_test");
        SingleOutputStreamOperator<String> kafkaOperator = env.addSource(kafkaConsumer).uid("kafkaOperator-uid");
        // Parse the raw JSON, stamp ingestion time/date, normalize nulls, then drop
        // records that are still missing mandatory fields.
        SingleOutputStreamOperator<WaringTermalDwmWithStragegyPhev> sitOperator = kafkaOperator
                .map((MapFunction<String, WaringTermalDwmWithStragegyPhev>) s -> {
                    logger.info("PHEV热失控的kafka原测试数据：{}", s);
                    WaringTermalDwmWithStragegyPhev sit = JSONUtil.toBean(s, WaringTermalDwmWithStragegyPhev.class);
                    Date date = new Date();
                    sit.setIngestionTime(DateUtil.format(date, DatePattern.NORM_DATETIME_FORMAT));
                    sit.setIngestionDate(DateUtil.format(date, DatePattern.NORM_DATE_FORMAT));
                    return sit;
                }).uid("sitOperator-uid1")
                .map(new TermalWaringNullPhevMapFunction()).uid("sitOperator-uid12")
                .filter(s -> !s.checkFieldNullValue()).uid("sitOperator-uid3");

        // Join each record with the broadcast rule state, keyed by device id.
        DataStream<WaringTermalDwmWithStragegyPhev> termalDwmWithStragegyStream = sitOperator
                .keyBy(record -> record.getDeviceId())
                .connect(broadcastStream)
                .process(new TermalWarningPhevBroadcastProcessFunction(PHEV_MAP_STATE_DESCRIPTOR))
                .setParallelism(parallelism * extend)
                .uid("termalDwmWithStragegyStream-uid")
                .shuffle();
        // Side output carrying the raw BMS thermal-runaway signals for HBase.
        OutputTag<JSONObject> outputTag = new OutputTag<JSONObject>("bmsTrdSignalPhev") {};
        // Temperature / temperature-difference / voltage checks.
        DataStream<Dict> process = termalDwmWithStragegyStream
                .assignTimestampsAndWatermarks(WatermarkStrategy.<WaringTermalDwmWithStragegyPhev>forBoundedOutOfOrderness(Duration.ZERO)
                        .withTimestampAssigner((el, recordTimestamp) -> el.getItem_time())).shuffle()
                .keyBy(record -> record.getDeviceId())
                .process(new TermalWarningPhevProcessFunction())
                .setParallelism(parallelism * extend)
                .uid("process-uid")
                .shuffle();
        // Voltage-change and temperature-rise-rate checks over a sliding count window
        // of the last two records per device.
        DataStream<Dict> process1 = process
                .keyBy(s -> ((WaringTermalWithStragegyAndFlagPhev) s.get(TermalProcessResult.CHECK_RECORD)).getDeviceId())
                .countWindow(2, 1)
                .process(new TermalWarningRisePhevProcessFunction())
                .setParallelism(parallelism * extend)
                .uid("process1-uid")
                .shuffle();
        // Fusion with thermal-runaway events reported by the vehicle itself.
        DataStream<Dict> process2 = process1
                .keyBy(s -> ((WaringTermalWithStragegyAndFlagPhev) s.get(TermalProcessResult.CHECK_RECORD)).getDeviceId())
                .process(new TermalWarningPhevVehicleCheckProcessFunction())
                .setParallelism(parallelism * extend)
                .uid("process2-uid")
                .shuffle();
        // Aggregate all detection results; raw signals are emitted to the side output.
        SingleOutputStreamOperator<String> operator = process2
                .keyBy(s -> ((WaringTermalWithStragegyAndFlagPhev) s.get(TermalProcessResult.CHECK_RECORD)).getDeviceId())
                .process(new TermalWarningResultPhevProcessFunction(2, 1, outputTag, VehiclePowerType.PHEV))
                .setParallelism(parallelism * extend)
                .uid("operator-uid");
        // Log detection results (picked up by the log collector / Kibana).
        // NOTE: Flink MapFunctions must not return null — a null record would throw
        // an NPE when emitted — so the value is passed through unchanged.
        operator.map(new MapFunction<String, String>() {
            @Override
            public String map(String value) throws Exception {
                logger.info("PHEV热失控检测结果：{}", value);
                return value;
            }
        });
        // Sink the detection results to Kafka.
        String topic = ConfigLoader.get("kafka.termalWarning.phev.topic");
        defaultSinkToKafka(operator, topic, "720000", parallelism * extend);
        // Raw thermal-runaway source signals from the side output.
        DataStream<JSONObject> sideOutput = operator.getSideOutput(outputTag);
        // Log raw signals (Kibana); pass-through instead of returning null, see above.
        sideOutput.map(new MapFunction<JSONObject, String>() {
            @Override
            public String map(JSONObject value) throws Exception {
                String json = JSONUtil.toJsonStr(value);
                logger.info("PHEV热失控事件源信号:{}", json);
                return json;
            }
        });
        // Sink the raw signals to HBase.
        DataStream<SourceHbaseBean> hbaseSinkStreamLast =
                convertStream(sideOutput, ConfigLoader.get("hbase.table.termalWarning.phev.name"), parallelism * extend);
        hbaseSinkStreamLast.addSink(new HbaseSinkProducer<SourceHbaseBean>())
                .name("recordHbasePhev")
                .setParallelism(parallelism * extend)
                .uid("hbaseSinkStreamLast-uid");
    }
}
