package com.chenxu.gmall.realtime.app.dwd;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.chenxu.gmall.realtime.utils.MyKafkaUtil;
import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;

import java.text.SimpleDateFormat;
import java.util.Date;

/**
 * Date: 2021/07/10
 * Desc: Prepares the DWD layer for user-behavior logs — reads raw logs from the
 *       Kafka ODS topic, repairs the new-visitor flag, splits the stream into
 *       page / start / display logs, and writes each back to its own DWD topic.
 */
public class BaseLogApp {

    // Target DWD Kafka topics for the three split streams.
    private static final String TOPIC_START = "dwd_start_log";
    private static final String TOPIC_DISPLAY = "dwd_display_log";
    private static final String TOPIC_PAGE = "dwd_page_log";

    public static void main(String[] args) throws Exception {
        //TODO 1. Set up the environment
        //1.1 Create the Flink streaming execution environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        //1.2 Set parallelism
        env.setParallelism(1);

        //1.3 Checkpointing (currently disabled).
        // With checkpointing enabled, HDFS must be running and writable:
        // either `hdfs dfs -chmod -R 777 /` or set the Hadoop user name:
        //System.setProperty("HADOOP_USER_NAME","chenxu");
//        env.enableCheckpointing(5000, CheckpointingMode.EXACTLY_ONCE); // checkpoint every 5000 ms
//        env.getCheckpointConfig().setCheckpointTimeout(60000);          // abort after 1 minute
//        env.setStateBackend(new FsStateBackend("hdfs://hadoop102:8020/gmall/checkpoint/baselogApp"));

        //TODO 2. Read data from the Kafka ODS layer
        String topic = "ods_base_log";
        // Consumer group id.
        // NOTE(review): the id says "base_db_app_group" although this is the log
        // app — looks copy-pasted; confirm before changing, as changing it resets
        // the consumer group's committed offsets.
        String groupId = "base_db_app_group";

        FlinkKafkaConsumer<String> kafkaSource = MyKafkaUtil.getKafkaSource(topic, groupId);
        // Read the raw log records from Kafka as strings.
        DataStreamSource<String> kafkaDS = env.addSource(kafkaSource);

        //TODO 3. Convert each record from String to JSONObject.
        // Working on the JSON structure directly is both faster and safer than
        // string matching, since the payload is JSON to begin with.
        SingleOutputStreamOperator<JSONObject> jsonObjDS = kafkaDS.map(jsonStr -> JSON.parseObject(jsonStr));
        //jsonObjDS.print("json>>>>");

        //TODO 4. Distinguish new vs. returning visitors (repair the is_new flag).
        // Idea: persist the first visit date per device as keyed state; when a later
        // log for the same device arrives, compare its date against the stored one.
        // If state exists and the dates differ, the visitor is NOT new — fix is_new.
        //1. Key the stream by device id (mid), found under the "common" object.
        KeyedStream<JSONObject, String> midKey = jsonObjDS.keyBy(data -> data.getJSONObject("common").getString("mid"));

        //2. Repair logic (currently disabled — midKey is consumed directly below).
        /*
        Sample records:
         // page log:
         {
         "common":{"ar":"420000","uid":"17","os":"Android 11.0","ch":"wandoujia","is_new":"0","md":"Honor 20s","mid":"mid_17","vc":"v2.1.134","ba":"Honor"},
         "page":{"page_id":"cart","during_time":5649,"last_page_id":"good_detail"},
         "ts":1625884881000
         }
         // start log:
         {"common":{"ar":"370000","uid":"13","os":"Android 11.0","ch":"xiaomi","is_new":"1","md":"Xiaomi 9","mid":"mid_18","vc":"v2.1.132","ba":"Xiaomi"},
         "start":{"entry":"icon","open_ad_skip_ms":0,"open_ad_ms":8833,"loading_time":12922,"open_ad_id":16},
         "ts":1625884882000}
         // error log:
         {"common":{"ar":"370000","uid":"49","os":"iOS 13.2.3","ch":"Appstore","is_new":"1","md":"iPhone Xs Max","mid":"mid_15","vc":"v2.1.134","ba":"iPhone"},
         "err":{"msg":" Exception in thread \\  java.net.SocketTimeoutException\\n \\tat com.atgugu.gmall2020.mock.log.bean.AppError.main(AppError.java:xxxxxx)","error_code":3653},
         "page":{"page_id":"search","during_time":8270,"last_page_id":"home"},
         "ts":1625884875000}
         // display (exposure) log, truncated:
         {"common":{"ar":"230000","uid":"36","os":"iOS 13.3.1","ch":"Appstore","is_new":"0","md":"iPhone 8","mid":"mid_19","vc":"v2.1.132","ba":"iPhone"},
         "page":{"page_id":"home","during_time":17009},
         "displays":[{"display_type":"activity","item":"2","item_type":"activity_id","pos_id":1,"order":1}...
         "ts":1625884834000}
         */
//        SingleOutputStreamOperator<JSONObject> jsonDSWithrepair = midKey.map(
//                // Input and output are both JSONObject.
//                new RichMapFunction<JSONObject, JSONObject>() {
//
//                    // First visit date ("yyyyMMdd") per device, kept in keyed state.
//                    private ValueState<String> firstVisitDateState;
//                    // Date formatter for the epoch-millis "ts" field.
//                    // NOTE: SimpleDateFormat is not thread-safe, but a RichMapFunction
//                    // instance is confined to a single task thread, so this is safe here.
//                    private SimpleDateFormat sdf;
//
//                    // Runs once per task before any records are processed.
//                    @Override
//                    public void open(Configuration parameters) throws Exception {
//                        firstVisitDateState = getRuntimeContext().getState(
//                                new ValueStateDescriptor<String>("newMidDateState", String.class)
//                        );
//                        sdf = new SimpleDateFormat("yyyyMMdd");
//                    }
//
//                    // Runs once per record: verify/repair the is_new flag.
//                    @Override
//                    public JSONObject map(JSONObject value) throws Exception {
//                        // is_new as reported by the client ("1" = new, "0" = returning).
//                        String isNew = value.getJSONObject("common").getString("is_new");
//                        // Event time of this log record (epoch millis).
//                        Long ts = value.getLong("ts");
//
//                        if ("1".equals(isNew)) {
//                            // Date stored for this mid, if any.
//                            String stateDate = firstVisitDateState.value();
//                            // Date of the current record.
//                            String logDate = sdf.format(new Date(ts));
//                            if (stateDate != null && stateDate.length() != 0) {
//                                // State exists: if the dates differ the visitor has
//                                // been seen on an earlier day — the flag is wrong.
//                                // (Must use equals(), not ==, for String comparison.)
//                                if (!stateDate.equals(logDate)) {
//                                    isNew = "0";
//                                    value.getJSONObject("common").put("is_new", isNew);
//                                }
//                            } else {
//                                // No state yet: remember this visit date as the
//                                // device's first visit for future comparisons.
//                                firstVisitDateState.update(logDate);
//                            }
//                        }
//
//                        // Return the (possibly repaired) record.
//                        return value;
//                    }
//                }
//        );

        //TODO 5. Split the stream by log type: page logs stay on the main stream,
        // start logs go to the start side output, display logs to the display side output.

        // Side outputs serve two purposes: (1) late data, (2) stream splitting.
        // For event-time processing the three safety nets are: watermarks,
        // allowed lateness, and side outputs.

        // A plain `new OutputTag<>("start")` fails at runtime with
        // "Could not determine TypeInformation for the OutputTag type" because of
        // type erasure; the anonymous subclass `{}` preserves the generic type.
        OutputTag<String> startTag = new OutputTag<String>("start"){};
        OutputTag<String> displayTag = new OutputTag<String>("display"){};

        SingleOutputStreamOperator<String> pageDS = midKey.process(
                // Output is String because the records are written straight to Kafka.
                new ProcessFunction<JSONObject, String>() {
                    // Invoked for every record: route it by log type.
                    @Override
                    public void processElement(JSONObject jsonObject, Context context, Collector<String> out) throws Exception {

                        JSONObject startJsonObj = jsonObject.getJSONObject("start");
                        // Serialize once up front — used by both side output and main stream.
                        String dataStr = jsonObject.toString();

                        // A non-empty "start" object marks a start log.
                        if (startJsonObj != null && startJsonObj.size() > 0) {
                            context.output(startTag, dataStr);
                        } else {
                            // Otherwise it is a page log — emit on the main stream.
                            out.collect(dataStr);

                            // A page log may also carry display (exposure) events.
                            JSONArray displays = jsonObject.getJSONArray("displays");
                            if (displays != null && displays.size() > 0) {
                                // Enrich each display entry with the page id before emitting.
                                String pageId = jsonObject.getJSONObject("page").getString("page_id");
                                for (int i = 0; i < displays.size(); i++) {
                                    JSONObject displaysJsonObj = displays.getJSONObject(i);
                                    displaysJsonObj.put("page_id", pageId);
                                    context.output(displayTag, displaysJsonObj.toString());
                                }
                            }
                        }
                    }
                }
        );

        // Extract the two side-output streams.
       DataStream<String> startDS = pageDS.getSideOutput(startTag);
       DataStream<String> displayDS = pageDS.getSideOutput(displayTag);

       /*
       Test: start zk, kafka, log.sh;
       Sample output:
       page:
       page:2> {"common":{"ar":"530000","uid":"36","os":"Android 11.0","ch":"wandoujia","is_new":"0",
       "md":"Xiaomi 10 Pro ","mid":"mid_9","vc":"v2.1.134","ba":"Xiaomi"},"page":{"page_id":"good_list","item":"图书",
       "during_time":7183,"item_type":"keyword","last_page_id":"search"},"displays":[{"display_type":"recommend",
       "item":"1","item_type":"sku_id","pos_id":5,"order":1},{"display_type":"recommend","item":"5",
       "item_type":"sku_id","pos_id":2,"order":2},{"display_type":"query","item":"2","item_type":"sku_id","pos_id":2,
       "order":3},{"display_type":"promotion","item":"5","item_type":"sku_id","pos_id":3,"order":4},
       {"display_type":"promotion","item":"9","item_type":"sku_id","pos_id":4,"order":5},{"display_type":"query",
       "item":"9","item_type":"sku_id","pos_id":5,"order":6}],"ts":1626684732000}

       display:
       displays:1> {"display_type":"promotion","page_id":"good_detail","item":"6","item_type":"sku_id","pos_id":3,"order":1}
        */
       pageDS.print("page");
       startDS.print("start");
       displayDS.print("displays");

        //TODO 6. Write each stream back to its own Kafka DWD topic.
        FlinkKafkaProducer<String> startSink = MyKafkaUtil.getKafkaSink(TOPIC_START);
        startDS.addSink(startSink);

        FlinkKafkaProducer<String> displaySink = MyKafkaUtil.getKafkaSink(TOPIC_DISPLAY);
        displayDS.addSink(displaySink);

        FlinkKafkaProducer<String> pageSink = MyKafkaUtil.getKafkaSink(TOPIC_PAGE);
        pageDS.addSink(pageSink);

        env.execute();
    }

}