package cn.gwm.flink.streaming.task;

import cn.gwm.flink.streaming.beans.BeanSource;
import cn.gwm.flink.streaming.constant.BaseFields;
import cn.gwm.flink.streaming.constant.FaultToleranceConstant;
import cn.gwm.flink.streaming.dwd.consumer.EC24Consumer;
import cn.gwm.flink.streaming.dwd.model.DelayEnum;
import cn.gwm.flink.streaming.sink.hdfs.HdfsUtil;
import cn.gwm.flink.streaming.sink.kafka.FlinkKafkaUtil;
import cn.gwm.utils.ConfigLoader;
import cn.hutool.json.JSONObject;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

/**
 * @ClassName DwdEc24Task
 * @Description Flink DWD-layer job for the EC24 source: consumes cleaned EC24 events,
 *              forwards non-delayed records to the DWD Kafka topic, and writes the
 *              full stream to HDFS as ORC for the DWD Hive table.
 * @Author LiangGuang
 * @Date 2023/05/18 14:41
 */
public class DwdEc24Task {

    /**
     * Job entry point. Wires the EC24 DWD pipeline:
     * <ol>
     *   <li>loads configuration from program arguments and resolves the Flink job name;</li>
     *   <li>applies the project's standard environment settings (checkpointing etc.) via
     *       {@code BaseTask.envSet};</li>
     *   <li>consumes EC24 records as {@link JSONObject}s;</li>
     *   <li>sinks records whose delay flag is {@code NORMAL} to the DWD Kafka topic;</li>
     *   <li>sinks the <em>unfiltered</em> stream to HDFS as ORC for the DWD Hive table.</li>
     * </ol>
     *
     * @param args program arguments consumed by {@code ConfigLoader.init}
     * @throws Exception if environment setup or {@code env.execute} fails
     */
    public static void main(String[] args) throws Exception {

        ConfigLoader.init(args);
        final String jobName = ConfigLoader.get(FaultToleranceConstant.FLINK_JOB_NAME);

        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        BaseTask.envSet(env, jobName);

        final DataStream<JSONObject> ec24Stream = new EC24Consumer(jobName).handle(env);

        // Kafka branch: only records marked NORMAL (i.e. not delayed) go to the DWD topic.
        ec24Stream.filter(item -> DelayEnum.NORMAL.getCode().equals(item.getStr(BaseFields.DELAY)))
                .uid("dwd-filter-" + jobName)
                .addSink(FlinkKafkaUtil.toKafka(ConfigLoader.get("kafka.clean.topic.dwd.ev")))
                .uid("dwdToKafka-" + jobName).name("dwdToKafka-ec24");

        // HDFS branch: the full, unfiltered stream is persisted as ORC under the DWD Hive path.
        // NOTE: the uid below contains a historical typo ("Tdo"). It is deliberately kept
        // byte-identical — changing an operator uid would break savepoint/checkpoint
        // restore for already-running deployments. Only the display name is corrected.
        ec24Stream.addSink(HdfsUtil.hdfsOrcSink("ec24",
                        "dwd/" + ConfigLoader.get("hive.table.name.dwd.ev"),
                        BeanSource.SourceEnum.evFullFieldSource))
                .uid("dwdTdoHdfs-" + jobName).name("dwdToHdfs-ec24");

        env.execute("dwdEv-ec24");
    }
}
