package net.bwie.realtime.jtp.dwd.log.job;

import com.alibaba.fastjson.JSON;
import net.bwie.realtime.jtp.dwd.log.function.AdjustIsNewProcessFunction;
import net.bwie.realtime.jtp.dwd.log.function.AppLogSplitProcessFunction;
import net.bwie.realtime.jtp.utils.KafkaUtil;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SideOutputDataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;

/**
 * DWD-layer application: consumes raw log data collected into the ODS layer,
 * classifies and processes it, and writes the results to Kafka topics.
 * Data flow: Kafka -> Flink DataStream -> Kafka
 */
public class JtpAppLogEtlJob {

    public static void main(String[] args) throws Exception {
        //1.执行环境-env
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        //2.数据源-source
        DataStream<String> kafkaStream = KafkaUtil.consumerKafka(env, "topic-log");
//        kafkaStream.print("kafka");
        //3.数据转换-transformation
        DataStream<String> pageStream = processLog(kafkaStream);
        //4.数据输出-sink
        KafkaUtil.producerKafka(kafkaStream,"dwd-traffic-page-log");
        //5.触发执行-execute
        env.execute("JtpAppLogEtlJob");

    }

    /**
     * 对实时获取APP流量日志数据进行ETL处理，并进行分流，存储kafka消息队列
     * 1-数据清洗
     * 2-新老访客状态标记修复
     * 3-数据分流
     * 4-存储数据（非页面日志存储）
     */
    private static DataStream<String> processLog(DataStream<String> stream) {
        //1-数据清洗
        DataStream<String> jsonStream= appLogCleaned(stream);

        //2-新老访客状态标记修复
        DataStream<String> etlStream= processIsNew(jsonStream);

        //3-数据分流
        DataStream<String> pageStream = splitStream(etlStream);

        return pageStream;
    }

    private static DataStream<String> splitStream(DataStream<String> stream) {

        //todo 第1步、侧边流输出标记
        final OutputTag<String> errorTag =new OutputTag<String>("error-log"){};
        final OutputTag<String> startTag =new OutputTag<String>("start-log"){};
        final OutputTag<String> displayTag =new OutputTag<String>("display-log"){};
        final OutputTag<String> actionTag =new OutputTag<String>("action-log"){};

        //TODO 第2步、日志分流处理
        SingleOutputStreamOperator<String> pageStream = stream.process(
                new AppLogSplitProcessFunction(errorTag,startTag,displayTag,actionTag) {
                });

        //TODO 第3步、测流输出
        DataStream<String> errorStream = pageStream.getSideOutput(errorTag);
        KafkaUtil.producerKafka(errorStream,"dwd-traffic-error-log");
        DataStream<String> startStream = pageStream.getSideOutput(startTag);
        KafkaUtil.producerKafka(startStream,"dwd-traffic-start-log");
        DataStream<String> displayStream = pageStream.getSideOutput(displayTag);
        KafkaUtil.producerKafka(displayStream,"dwd-traffic-display-log");
        DataStream<String> actionStream = pageStream.getSideOutput(actionTag);
        KafkaUtil.producerKafka(actionStream,"dwd-traffic-action-log");

        //TODO 第4步、输出主流
        return pageStream;
    }

    /**
     * 2-新老访客状态标记修复（纠正）
     * @param stream
     * @return
     */
    private static DataStream<String> processIsNew(DataStream<String> stream) {

        //a-按照设备ID进行分组
        KeyedStream<String, String> midStream = stream.keyBy(new KeySelector<String, String>() {
            @Override
            public String getKey(String s) throws Exception {
                //todo value值数据格式
                /*
                    {
                      "common": {
                        "ar": "420000",
                        "ba": "iPhone",
                        "ch": "Appstore",
                        "is_new": "0",
                        "md": "iPhone X",
                        "mid": "mid_729169",
                        "os": "iOS 13.3.1",
                        "uid": "698",
                        "vc": "v2.1.134"
                      },
                      "page": {
                        "during_time": 16707,
                        "last_page_id": "home",
                        "page_id": "search"
                      },
                      "ts": 1755180334000
                    }
                 */
                //todo 经过上述分析可知，需要经过2次解析获取mid值
                return JSON.parseObject(s).getJSONObject("common").getString("mid");
            }
        });
        //b-状态编程，对is_new进行校验修复
        DataStream<String> isNewStream = midStream.process(new AdjustIsNewProcessFunction());

        //c-返回数据流
        return isNewStream;
    }

    /**
     * 1-数据清洗，将不合格数据侧边输出
     * @param stream 原始app流量日志数据流
     * @return
     */
    private static DataStream<String> appLogCleaned(DataStream<String> stream) {

        //a-脏数据侧边流输出时标记
        final OutputTag<String> dirtyTag = new OutputTag<String>("dirty-log"){};

        //b-数据清洗处理
        SingleOutputStreamOperator<String> cleanedStream = stream.process(new ProcessFunction<String, String>() {
            @Override
            public void processElement(String value, Context ctx, Collector<String> out) throws Exception {
                try {
                    //a.解析json数据
                    JSON.parseObject(value);
                    //b.没有异常，解析正确，正常输出
                    out.collect(value);
                }catch (Exception e){
                    //c.捕获异常，侧边流输出数据
                    ctx.output(dirtyTag,value);
                }

            }
        });

        //c-侧边流输出：脏数据
        SideOutputDataStream<String> dirtyStream = cleanedStream.getSideOutput(dirtyTag);
        KafkaUtil.producerKafka(dirtyStream,"dwd-traffic-dirty-log");
        //d-返回正常数据流
        return cleanedStream;
    }

}
