package com.atguigu.app.dwd;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.utils.MyKafkaUtil;
import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;

import java.text.SimpleDateFormat;

/**
 * DWD-layer log splitter: reads raw logs from Kafka (ods_base_log), validates
 * JSON, corrects the new-user flag via keyed state, and splits the stream into
 * page / start / display logs written to separate DWD Kafka topics.
 * created by A on 2021/3/19
 */
//Web/app -> Nginx -> SpringBoot -> Kafka(ods_base_log) -> Flink -> Kafka(dwd page, start, display)
//MockLog -> Nginx -> Logger -> Kafka(ZK) -> Flink(BaseLogApp) -> Kafka (three downstream consumers)
public class BaseLogApp {

    public static void main(String[] args) throws Exception {
        //TODO 1 Create the execution environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        //Parallelism should match the partition count of the source Kafka topic
        env.setParallelism(1);

        //Checkpointing / state backend config, disabled for local development.
        //Re-enable for production deployments.
        //env.enableCheckpointing(5000L);
        //env.getCheckpointConfig().setCheckpointingMode(CheckpointingMode.EXACTLY_ONCE);
        //env.getCheckpointConfig().setCheckpointTimeout(10000L);
        //env.setRestartStrategy(RestartStrategies.fixedDelayRestart(3, 5000L));
        //env.setStateBackend(new FsStateBackend("hdfs://hadoop102:8020/gmall-flink/ck"));
        //System.setProperty("HADOOP_USER_NAME", "atguigu");
        String groupId = "base_log_app_group";
        String topic = "ods_base_log";

        //TODO 2 Read the ods_base_log Kafka topic into a stream
        FlinkKafkaConsumer<String> kafkaSource = MyKafkaUtil.getKafkaSource(groupId, topic);
        DataStreamSource<String> kafkaDS = env.addSource(kafkaSource);

        //TODO 3 New/old user validation (keyBy + keyed state + rich function)
        //1. Parse each record into JSON; records that fail to parse are treated
        //   as dirty data and routed to a side output instead of killing the job.
        OutputTag<String> outputTag1 = new OutputTag<String>("dirty") {
        };
        SingleOutputStreamOperator<JSONObject> jsonObjDs = kafkaDS.process(
                new ProcessFunction<String, JSONObject>() {
                    @Override
                    public void processElement(String value, Context ctx, Collector<JSONObject> collector) throws Exception {
                        try {
                            collector.collect(JSON.parseObject(value));
                        } catch (Exception ex) {
                            //Malformed JSON: keep the raw record in the dirty side output
                            ctx.output(outputTag1, value);
                        }
                    }
                }
        );

        //2. Key the stream by mid (device id), the unique identifier of a user.
        //   KeySelector generics: <input element type, key type>.
        KeyedStream<JSONObject, String> keyByDS = jsonObjDs.keyBy(
                new KeySelector<JSONObject, String>() {
                    @Override
                    public String getKey(JSONObject value) throws Exception {
                        return value.getJSONObject("common").getString("mid");
                    }
                }
        );

        //3. Use per-key ValueState to verify the is_new flag: if this mid was
        //   seen before, downgrade is_new from "1" to "0".
        //   Keyed-state notes: state is scoped per key; read with state.value(),
        //   write with state.update().
        SingleOutputStreamOperator<JSONObject> jsonObjectNewFlags = keyByDS.map(
                new RichMapFunction<JSONObject, JSONObject>() {
                    //First-visit date for the current key; null means never seen.
                    private ValueState<String> firstVisitDateState;
                    //Created once per subtask in open() instead of per record.
                    //Safe: each subtask calls map() from a single thread, so the
                    //non-thread-safe SimpleDateFormat is never shared.
                    private SimpleDateFormat dateFormat;

                    @Override
                    public void open(Configuration parameters) throws Exception {
                        firstVisitDateState = getRuntimeContext().getState(
                                new ValueStateDescriptor<String>("visit-state", String.class));
                        dateFormat = new SimpleDateFormat("yyyy-MM-dd");
                    }

                    @Override
                    public JSONObject map(JSONObject value) throws Exception {
                        //Only records claiming to be new ("1") need verification
                        String isNew = value.getJSONObject("common").getString("is_new");
                        if ("1".equals(isNew)) {
                            String firstVisitDate = firstVisitDateState.value();
                            if (firstVisitDate != null) {
                                //State exists => this device visited before; fix the flag
                                value.getJSONObject("common").put("is_new", "0");
                            } else {
                                //Genuinely new: record the first-visit date in state
                                Long ts = value.getLong("ts");
                                firstVisitDateState.update(dateFormat.format(ts));
                            }
                        }
                        return value;
                    }
                }
        );

        //TODO 4 Split the stream with side outputs:
        //page logs -> main stream, start logs and display logs -> side outputs.
        OutputTag<String> startOutPutTag = new OutputTag<String>("start") {
        };
        OutputTag<String> displayOutputTag = new OutputTag<String>("display") {
        };
        SingleOutputStreamOperator<String> pageLogDS = jsonObjectNewFlags.process(
                new ProcessFunction<JSONObject, String>() {
                    @Override
                    public void processElement(JSONObject jsonObject, Context context, Collector<String> collector) throws Exception {
                        String start = jsonObject.getString("start");
                        if (start != null && start.length() > 0) {
                            //Presence of a "start" field marks a start-up log
                            context.output(startOutPutTag, jsonObject.toJSONString());
                        } else {
                            //Everything else is a page log: emit on the main stream
                            collector.collect(jsonObject.toJSONString());

                            //A page log may carry display (exposure) records;
                            //enrich each with the page_id and emit individually
                            JSONArray displays = jsonObject.getJSONArray("displays");
                            if (displays != null && displays.size() > 0) {
                                String pageID = jsonObject.getJSONObject("page").getString("page_id");
                                for (int i = 0; i < displays.size(); i++) {
                                    JSONObject displayJSONObject = displays.getJSONObject(i);
                                    displayJSONObject.put("page_id", pageID);
                                    context.output(displayOutputTag, displayJSONObject.toJSONString());
                                }
                            }
                        }
                    }
                }
        );

        //TODO 5 Write each stream to its DWD Kafka topic
        pageLogDS.print("Page>>>>>>>>>");
        pageLogDS.getSideOutput(startOutPutTag).print("start>>>>>>>>>>");
        pageLogDS.getSideOutput(displayOutputTag).print("display>>>>>>>");
        //Dirty records were previously dropped silently; surface them so
        //malformed input is observable (consider a dedicated dirty topic).
        jsonObjDs.getSideOutput(outputTag1).print("dirty>>>>>>>>>>");
        pageLogDS.addSink(MyKafkaUtil.getKafkaSink("dwd_page_log"));
        pageLogDS.getSideOutput(startOutPutTag).addSink(MyKafkaUtil.getKafkaSink("dwd_start_log"));
        pageLogDS.getSideOutput(displayOutputTag).addSink(MyKafkaUtil.getKafkaSink("dwd_display_log"));

        //TODO 6 Submit the job
        env.execute("BaseLogApp");
    }
}
