package cn.gwm.flink.streaming.task;

import cn.gwm.flink.streaming.beans.BeanSource;
import cn.gwm.flink.streaming.constant.BaseFields;
import cn.gwm.flink.streaming.constant.FaultToleranceConstant;
import cn.gwm.flink.streaming.dwd.consumer.ES11Consumer;
import cn.gwm.flink.streaming.dwd.model.DelayEnum;
import cn.gwm.flink.streaming.dwd.streamsource.CommonStreamSource;
import cn.gwm.flink.streaming.sink.hdfs.HdfsUtil;
import cn.gwm.flink.streaming.sink.kafka.FlinkKafkaUtil;
import cn.gwm.flink.streaming.sink.kafka.KafkaConsumerEnum;
import cn.gwm.utils.ConfigLoader;
import cn.hutool.json.JSONObject;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

/**
 * @ClassName DwdEs11OtspTask
 * @Description DWD-layer Flink job for OTSP ES11 data: consumes the four
 *              ods_otsp_es11_{15,30,45,60} Kafka topics from the earliest
 *              offset, tags each record with its interval ("xx_time"),
 *              forwards the non-delayed union to the dwd Kafka topic, and
 *              writes each raw interval stream to HDFS as ORC.
 * @Author LiangGuang
 * @Date 2023/05/18 14:41
 */
public class DwdEs11OtspTask {

    public static void main(String[] args) throws Exception {

        ConfigLoader.init(args);
        String jobName = ConfigLoader.get(FaultToleranceConstant.FLINK_JOB_NAME);
        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        BaseTask.envSet(env, jobName);

        // One source stream per reporting interval; each record is tagged with
        // its interval so the union below stays distinguishable downstream.
        DataStream<JSONObject> es11_15Stream = buildIntervalStream(env, jobName, "15");
        DataStream<JSONObject> es11_30Stream = buildIntervalStream(env, jobName, "30");
        DataStream<JSONObject> es11_45Stream = buildIntervalStream(env, jobName, "45");
        DataStream<JSONObject> es11_60Stream = buildIntervalStream(env, jobName, "60");

        DataStream<JSONObject> unionStream = es11_15Stream.union(es11_30Stream, es11_45Stream, es11_60Stream);

        // Only records whose delay flag is NORMAL are forwarded to the dwd Kafka topic.
        unionStream.filter(item -> DelayEnum.NORMAL.getCode().equals(item.getStr(BaseFields.DELAY)))
                .uid("dwd-filter-" + jobName)
                .addSink(FlinkKafkaUtil.toKafka("dwd_clean_ev_es11"))
                .uid("dwdToKafka-" + jobName)
                .name("dwdToKafka-es11-otsp");

        // NOTE(review): "dwdTdoHdfs" looks like a typo for "dwdToHdfs", but the
        // uid is part of the operator's state identity — renaming it would break
        // restoring from existing savepoints, so it is kept as-is.
        sinkToHdfs(es11_15Stream, jobName, "15");
        sinkToHdfs(es11_30Stream, jobName, "30");
        sinkToHdfs(es11_45Stream, jobName, "45");
        sinkToHdfs(es11_60Stream, jobName, "60");

        env.execute("dwdEv-es11-otsp");
    }

    /**
     * Builds one interval source stream: consumes topic ods_otsp_es11_&lt;suffix&gt;
     * from the earliest offset and tags every record with xx_time = suffix.
     *
     * @param env     the shared stream execution environment
     * @param jobName job name used to derive consumer and uid suffixes
     * @param suffix  interval suffix ("15", "30", "45" or "60")
     * @return the tagged stream for this interval
     */
    private static DataStream<JSONObject> buildIntervalStream(StreamExecutionEnvironment env,
                                                              String jobName,
                                                              String suffix) {
        return new ES11Consumer(jobName + "_" + suffix).setTopic("ods_otsp_es11_" + suffix)
                .setSource(new CommonStreamSource(KafkaConsumerEnum.EARLIEST).setUidSuffix(jobName + "_" + suffix))
                .handle(env)
                .map(item -> {
                    item.set("xx_time", suffix);
                    return item;
                });
    }

    /**
     * Attaches the HDFS ORC sink for one interval stream, writing under
     * dwd/es11_otsp_&lt;suffix&gt; with the full-field EV bean schema.
     *
     * @param stream  the interval stream to persist
     * @param jobName job name used to derive the operator uid
     * @param suffix  interval suffix ("15", "30", "45" or "60")
     */
    private static void sinkToHdfs(DataStream<JSONObject> stream, String jobName, String suffix) {
        stream.addSink(HdfsUtil.hdfsOrcSink("es11", "dwd/es11_otsp_" + suffix, BeanSource.SourceEnum.evFullFieldSource))
                .uid("dwdTdoHdfs-" + jobName + "_" + suffix)
                .name("dwdTdoHdfs-es11-otsp-" + suffix);
    }
}
