package cn.gwm.flink.streaming.task;

import cn.gwm.flink.streaming.beans.BeanSource;
import cn.gwm.flink.streaming.constant.BaseFields;
import cn.gwm.flink.streaming.constant.FaultToleranceConstant;
import cn.gwm.flink.streaming.dwm.charge.function.ChargeFunction;
import cn.gwm.flink.streaming.dwm.charge.function.ChargePreFunction;
import cn.gwm.flink.streaming.dwm.charge.model.ChargeConstant;
import cn.gwm.flink.streaming.dwm.charge.model.ChargePhevConstant;
import cn.gwm.flink.streaming.dwm.charge.template.ChargeStateModel;
import cn.gwm.flink.streaming.sink.kafka.FlinkKafkaUtil;
import cn.gwm.utils.ConfigLoader;
import cn.hutool.json.JSONObject;
import cn.hutool.json.JSONUtil;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;

/**
 * DWM-layer charge-topic job for PHEV vehicles.
 *
 * <p>Consumes cleaned DWD PHEV records from Kafka, runs charge pre-processing and
 * charge detection keyed by VIN, then routes results by their {@code "type"} field
 * into three side outputs (summary, detail, favourable). Summary and detail streams
 * are normalized via {@link BeanSource#transJson} and written to both Kafka and HDFS
 * file sinks; favourable records go to HDFS only.
 */
public class DwmChargePhevTask {

    /**
     * Job entry point: builds the Flink streaming pipeline and blocks on execution.
     *
     * @param args command-line arguments forwarded to {@link ConfigLoader#init}
     * @throws Exception if the Flink job fails to build or execute
     */
    public static void main(String[] args) throws Exception {
        ConfigLoader.init(args);

        final ChargeConstant instance = ChargePhevConstant.getInstance();
        final String jobName = ConfigLoader.get(FaultToleranceConstant.FLINK_JOB_NAME);

        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        BaseTask.envSet(env, jobName);

        // Consume the cleaned DWD PHEV topic, starting from the consumer group's
        // committed offsets. To replay from a point in time during debugging, call
        // kafkaConsumer.setStartFromTimestamp(...) before adding the source.
        final FlinkKafkaConsumer<String> kafkaConsumer = FlinkKafkaUtil.getConsumerGroupOffsets(
                "consumer-dwm-charge-phev",
                ConfigLoader.get("kafka.clean.topic.dwd.phev", "dwd_clean_phev")
        );

        // Side-output tags for the three record categories. The anonymous subclasses
        // are required so Flink can capture the generic JSONObject type at runtime.
        final OutputTag<JSONObject> summarStream = new OutputTag<JSONObject>("summar") {
        };
        final OutputTag<JSONObject> detailStream = new OutputTag<JSONObject>("detail") {
        };
        final OutputTag<JSONObject> favourableStream = new OutputTag<JSONObject>("favourable") {
        };

        // Parse raw JSON strings, key by VIN, apply pre-processing and charge
        // detection, then split the output by record "type" into side outputs.
        final SingleOutputStreamOperator<JSONObject> process = env.addSource(kafkaConsumer)
                .map(JSONUtil::parseObj).uid("convertJson_" + jobName)
                .keyBy(sd -> sd.getStr(BaseFields.vin))
                .flatMap(new ChargePreFunction(instance)).uid("preCharge_" + jobName)
                .keyBy(sd -> sd.getStr(BaseFields.vin))
                .flatMap(new ChargeFunction(instance)).uid("charge_" + jobName)
                .process(new ProcessFunction<JSONObject, JSONObject>() {
                    @Override
                    public void processElement(JSONObject chargeData, Context ctx, Collector<JSONObject> out) throws Exception {
                        // Detail and favourable records go to their dedicated side
                        // outputs; any other type is treated as a summary record.
                        if (ChargeConstant.detail.equals(chargeData.getStr("type"))) {
                            ctx.output(detailStream, chargeData);
                        } else if (ChargeConstant.favourable.equals(chargeData.getStr("type"))) {
                            ctx.output(favourableStream, chargeData);
                        } else {
                            ctx.output(summarStream, chargeData);
                        }
                    }
                }).uid("process_" + jobName);

        // Summary stream: normalize to the evChargeSummar bean shape. When the source
        // record carries END_FLAG, the transformed record is stamped with the default
        // value so downstream consumers see a uniform flag value.
        final DataStream<JSONObject> summarProducerStream = process.getSideOutput(summarStream)
                .map(item -> {
                    JSONObject transJson = BeanSource.transJson(item, BeanSource.SourceEnum.evChargeSummar);
                    if (item.containsKey(ChargeStateModel.END_FLAG)) {
                        transJson.set(ChargeStateModel.END_FLAG, BaseFields.defaultVal);
                    }
                    return transJson;
                }).uid("summarMap_" + jobName);

        // Detail stream: normalize to the evChargeDetail bean shape.
        final DataStream<JSONObject> detailsProducerStream = process.getSideOutput(detailStream)
                .map(item -> BeanSource.transJson(item, BeanSource.SourceEnum.evChargeDetail))
                .uid("detailMap_" + jobName);

        // Favourable stream is forwarded to its HDFS sink without transformation.
        final DataStream<JSONObject> favourableProducerStream = process.getSideOutput(favourableStream);

        // Kafka sinks for the summary and detail topics.
        summarProducerStream.addSink(FlinkKafkaUtil.toKafka(ConfigLoader.get("kafka.topic.charge.summar"))).name("toKafkaSummar-phev");
        detailsProducerStream.addSink(FlinkKafkaUtil.toKafka(ConfigLoader.get("kafka.topic.charge.detail"))).name("toKafkaDetail-phev");

        // HDFS sink for summaries: only records carrying END_FLAG are persisted
        // (presumably completed charge sessions — confirm against ChargeFunction).
        summarProducerStream.filter(item -> item.containsKey(ChargeStateModel.END_FLAG))
                .addSink(BaseTask.getFileSink(ConfigLoader.get("hive.table.name.dwm.charge.summar"),
                        ".txt",
                        "dwm/dwm_charge_phev_summar",
                        BeanSource.SourceEnum.evChargeSummar))
                .uid("summar2hdfs_" + jobName)
                .name("toHdfsSummar-phev");

        // HDFS sink for detail records.
        detailsProducerStream.addSink(BaseTask.getFileSink(ConfigLoader.get("hive.table.name.dwm.charge.detail"),
                        ".txt",
                        "dwm/dwm_charge_phev_detail",
                        BeanSource.SourceEnum.evChargeDetail))
                .uid("detail2hdfs_" + jobName)
                .name("toHdfsDetail-phev");

        // HDFS sink for favourable records.
        favourableProducerStream.addSink(BaseTask.getFileSink("favourable",
                        ".txt",
                        "dwm/phev_favourable",
                        BeanSource.SourceEnum.chargeFavourable))
                .uid("favourable2hdfs_" + jobName)
                .name("toHdfsFavourable_phev");

        env.execute("dwmCharge-phev");
    }
}
