package com.atguigu.edu.realtime.app.dwd.traffic;

import com.alibaba.fastjson.JSONObject;
import com.atguigu.edu.realtime.common.KafkaTopicConfig;
import com.atguigu.edu.realtime.common.LogTagType;
import com.atguigu.edu.realtime.function.FixVisitorProcessFunction;
import com.atguigu.edu.realtime.function.LogSplitProcessFunction;
import com.atguigu.edu.realtime.util.KafkaUtil;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.util.Collector;

/**
 * BaseLogApp — DWD-layer traffic log splitting job.
 *
 * <p>Reads raw behavior logs from the Kafka {@code ods_log} topic, drops malformed
 * records, fixes the new/returning-visitor flag via keyed state, then splits the
 * stream by log type (play / display / error / action / start / page) through side
 * outputs and writes each sub-stream to its own DWD Kafka topic.
 *
 * @author zhaoxunfeng
 * @since 2022-08-31
 */
public class BaseLogApp {
    public static void main(String[] args) {
        // TODO 1. Create the stream execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(3);

        // TODO 2. Consume the Kafka ods_log topic as the source stream.
        String groupId = "BaseLogApp";
        DataStreamSource<String> kafkaDS =
                env.addSource(KafkaUtil.getKafkaConsumer(KafkaTopicConfig.ODS_LOG_TOPIC, groupId));

        // TODO 3. Parse each line into a JSONObject, dropping records that are not
        // valid JSON or that lack the common.mid field the downstream keyBy relies on.
        SingleOutputStreamOperator<JSONObject> filterDS = kafkaDS.flatMap(new FlatMapFunction<String, JSONObject>() {
            @Override
            public void flatMap(String value, Collector<JSONObject> out) throws Exception {
                try {
                    JSONObject json = JSONObject.parseObject(value);
                    // Guard: keyBy below reads common.mid; a record without it would
                    // throw an NPE and fail the whole job, so drop it here instead.
                    JSONObject common = json == null ? null : json.getJSONObject("common");
                    if (common == null || common.getString("mid") == null) {
                        System.out.println(value + " missing common.mid, dropped");
                        return;
                    }
                    out.collect(json);
                } catch (Exception e) {
                    // Malformed JSON: report and drop rather than failing the job.
                    System.out.println(value + " 非json格式!!!");
                    e.printStackTrace();
                }
            }
        });

        // TODO 4. Key the stream by device id (mid) so visitor-state is per device.
        KeyedStream<JSONObject, String> keyedStream =
                filterDS.keyBy(json -> json.getJSONObject("common").getString("mid"));

        // TODO 5. Use keyed state to validate/fix the new-vs-returning-visitor flag.
        SingleOutputStreamOperator<JSONObject> fixedDS = keyedStream.process(new FixVisitorProcessFunction());

        // TODO 6. Split the stream by log type using side outputs.
        SingleOutputStreamOperator<JSONObject> splitDS = fixedDS.process(new LogSplitProcessFunction());

        // TODO 7. Extract each side-output stream and serialize back to String.
        DataStream<String> playLogDS = splitDS.getSideOutput(LogTagType.PLAY_LOG_TAG).map(JSONObject::toString);

        DataStream<String> displayLogDS = splitDS.getSideOutput(LogTagType.DISPLAY_LOG_TAG).map(JSONObject::toString);

        DataStream<String> errorLogDS = splitDS.getSideOutput(LogTagType.ERR_LOG_TAG).map(JSONObject::toString);

        DataStream<String> actionsLogDS = splitDS.getSideOutput(LogTagType.ACTION_LOG_TAG).map(JSONObject::toString);

        DataStream<String> startLogDS = splitDS.getSideOutput(LogTagType.START_LOG_TAG).map(JSONObject::toString);

        DataStream<String> pageLogDS = splitDS.getSideOutput(LogTagType.PAGE_LOG_TAG).map(JSONObject::toString);

        // TODO 8. Sink each sub-stream to its corresponding DWD Kafka topic.
        playLogDS.addSink(KafkaUtil.getKafkaProducer(KafkaTopicConfig.DWD_TRAFFIC_VIDEO_PLAY_LOG_TOPIC));

        displayLogDS.addSink(KafkaUtil.getKafkaProducer(KafkaTopicConfig.DWD_TRAFFIC_DISPLAY_LOG_TOPIC));

        errorLogDS.addSink(KafkaUtil.getKafkaProducer(KafkaTopicConfig.DWD_TRAFFIC_ERROR_LOG_TOPIC));

        actionsLogDS.addSink(KafkaUtil.getKafkaProducer(KafkaTopicConfig.DWD_TRAFFIC_ACTION_LOG_TOPIC));

        startLogDS.addSink(KafkaUtil.getKafkaProducer(KafkaTopicConfig.DWD_TRAFFIC_START_LOG_TOPIC));

        pageLogDS.addSink(KafkaUtil.getKafkaProducer(KafkaTopicConfig.DWD_TRAFFIC_PAGE_LOG_TOPIC));

        // TODO 9. Launch the job; preserve the cause when rethrowing.
        try {
            env.execute("BaseLogApp");
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }
}
