package com.zhu.app.dwd;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.zhu.config.ClusterParametersConfig;
import com.zhu.utils.DataFormatUtil;
import com.zhu.utils.ZhuKafkaUtil;
import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;



/**
 * Splits the raw traffic log stream (DWD layer).
 * 1. Split the stream into one topic per log type.
 * 2. ETL: records that are not valid JSON are routed to a dirty-data side output.
 * 3. Fix the new/old visitor flag ("is_new"): a user who uninstalls and later
 *    reinstalls the app would otherwise be re-marked as new, so keyed state keeps
 *    the first-visit date per device (mid):
 *     * state is null and is_new = 1: store this record's date in state, keep is_new unchanged;
 *     * state is non-null, is_new = 1, and the state date differs from this record's date:
 *       the user has visited before, so rewrite is_new to 0;
 *     * state is non-null, is_new = 1, and the state date equals this record's date:
 *       same-day repeat visit of a genuinely new user — no change;
 *     * is_new = 0 and state is null: an old user first seen after this job started —
 *       backfill state with yesterday's date so later same-day records stay marked old.
 *
 * Stream split: "start" and "page" logs are mutually exclusive (app launch vs. page view);
 * a page log may embed display and action records; error info may ride on any record.
 */

/*
 * Data flow: web/app -> Nginx -> log server (logFile on disk) -> Flume -> Kafka(ODS)
 *            -> FlinkApp -> Kafka(DWD)
 * Processes: MockData(log.sh) -> Flume -> Kafka(ZooKeeper) -> BaseLogApp -> Kafka(ZooKeeper)
 */
public class DWDTrafficBaseApp {

    public static void main(String[] args) throws Exception {

        //todo 1. get environment
        StreamExecutionEnvironment streamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment();
        streamExecutionEnvironment.setParallelism(4);  //kafka topic partitionNum = 4: one partition per sub-task
        //checkpoint (disabled for local runs; enable for production fault tolerance)
        /*
        streamExecutionEnvironment.enableCheckpointing(5 * 60000L, CheckpointingMode.EXACTLY_ONCE); //exactly-once
        //state backend
        streamExecutionEnvironment.setStateBackend(new HashMapStateBackend());
        streamExecutionEnvironment.getCheckpointConfig().setCheckpointStorage(ClusterParametersConfig.HDFS_CHECKPOINT_FILE_DIR);  //store checkpoints on HDFS
        System.setProperty("HADOOP_USER_NAME", "zhu");
        streamExecutionEnvironment.getCheckpointConfig().setCheckpointTimeout(10 * 60000L);  //timeout
        streamExecutionEnvironment.getCheckpointConfig().setMaxConcurrentCheckpoints(2);  //max concurrent checkpoints
        streamExecutionEnvironment.setRestartStrategy(RestartStrategies.fixedDelayRestart(3, 5 * 1000L));  //restart strategy
         */

        //todo 2.consumer kafka_topic topic_log to DStream
        String topic = ClusterParametersConfig.KAFKA_BASE_LOG_TOPIC;
        String groupId = "base_log_app_zhu_2023";
        DataStreamSource<String> kafkaBaseLogDStream =
                streamExecutionEnvironment.addSource(ZhuKafkaUtil.getFlinkKafkaConsumer(topic, groupId));

        //todo 3.filter not json data & getJSONObject
        OutputTag<String> dirtyTag = new OutputTag<String>("Dirty") {};
        SingleOutputStreamOperator<JSONObject> jsonObjLogDStream = kafkaBaseLogDStream.process(new ProcessFunction<String, JSONObject>() {
            @Override
            public void processElement(String value, ProcessFunction<String, JSONObject>.Context context,
                                       Collector<JSONObject> collector) throws Exception {
                try {
                    JSONObject jsonObject = JSON.parseObject(value);
                    // A record that parses as JSON but lacks "common.mid" would throw an
                    // NPE in the downstream keyBy and kill the job; treat it as dirty too.
                    JSONObject common = jsonObject.getJSONObject("common");
                    if (common == null || common.getString("mid") == null) {
                        context.output(dirtyTag, value);
                    } else {
                        collector.collect(jsonObject);
                    }
                } catch (Exception exception) {
                    //not valid JSON -> dirty side output
                    context.output(dirtyTag, value);
                }
            }
        });
        //get dirty data and print on console
        DataStream<String> dirtyDS = jsonObjLogDStream.getSideOutput(dirtyTag);
        dirtyDS.print("Dirty>>>>>");    //write to MySQL

        //todo 4.group by mid each mid has a state
        /*
        Data :{"common":{"ar":"440000","ba":"Xiaomi","ch":"web","is_new":"0","md":"Xiaomi 10 Pro ",
        "mid":"mid_105624","os":"Android 11.0","uid":"556","vc":"v2.1.134"},
        "page":{"during_time":1579,"last_page_id":"good_detail","page_id":"cart"},"ts":1680354196000}
         */
        KeyedStream<JSONObject, String> keyedStream =
                jsonObjLogDStream.keyBy(jsonObject -> jsonObject.getJSONObject("common").getString("mid"));

        //todo 5.mid stateAdmin
        //RichMapFunction so we get open() for state initialization and access to keyed state
        SingleOutputStreamOperator<JSONObject> jsonObjWithNewFlagDStream = keyedStream.map(new RichMapFunction<JSONObject, JSONObject>() {

            // First visit date (yyyy-MM-dd) of the current device key (mid).
            private ValueState<String> lastVisitDtState;

            @Override
            public void open(Configuration parameters) throws Exception {
                lastVisitDtState = getRuntimeContext().getState(new ValueStateDescriptor<String>("visit_date", String.class));
            }

            /**
             * Repairs the "is_new" flag using the per-mid first-visit date:
             *  - state null + is_new=1  -> genuinely new user; remember today's date;
             *  - state set  + is_new=1  + date differs -> reinstalled old user; rewrite is_new to 0;
             *  - state set  + is_new=1  + same date    -> same-day repeat of a new user; no-op;
             *  - state null + is_new=0  -> old user first seen by this job; backfill state
             *    with yesterday so later same-day records are still treated as old.
             */
            @Override
            public JSONObject map(JSONObject value) throws Exception {
                //get is_new and ts
                String isNew = value.getJSONObject("common").getString("is_new");
                Long timeStamp = value.getLong("ts");
                String currentDataDate = DataFormatUtil.toDate(timeStamp);  // this record's date
                //get state's date
                String lastDate = lastVisitDtState.value();
                if ("1".equals(isNew)) {
                    if (lastDate == null) {
                        lastVisitDtState.update(currentDataDate);  //new user: remember first visit date
                    } else if (!lastDate.equals(currentDataDate)) {
                        //actually an old user (reinstall): correct the flag
                        value.getJSONObject("common").put("is_new", "0");
                    }
                    //state set and dates equal: same-day repeat visit, nothing to do
                } else if (lastDate == null) {
                    //old user with no state yet: backfill with yesterday's date
                    lastVisitDtState.update(DataFormatUtil.toDate(timeStamp - 24 * 60 * 60 * 1000L));
                }
                return value;
            }
        });

        //todo 6. five DStream: main output = page, side outputs = start/display/action/error
        OutputTag<String> startTag = new OutputTag<String>("start") {};
        OutputTag<String> displayTag = new OutputTag<String>("displays") {};
        OutputTag<String> actionTag = new OutputTag<String>("actions") {};
        OutputTag<String> errorTag = new OutputTag<String>("error") {};

        SingleOutputStreamOperator<String> pageDStream = jsonObjWithNewFlagDStream.process(new ProcessFunction<JSONObject, String>() {
            @Override
            public void processElement(JSONObject value, ProcessFunction<JSONObject, String>.Context context, Collector<String> collector) throws Exception {
                //error info may ride on any record: copy it to the error stream, then strip it
                String err = value.getString("err");
                if (err != null) {
                    context.output(errorTag, value.toJSONString());
                }
                value.remove("err");
                String start = value.getString("start");
                if (start != null) {  //start log -> start side output
                    context.output(startTag, value.toJSONString());
                } else { //page log -> main output (plus embedded displays/actions)
                    // NOTE(review): "common" is extracted as a String, so it is embedded in
                    // display/action records as an escaped JSON string, not a nested object.
                    // getJSONObject("common") would nest it properly — confirm what downstream expects.
                    String common = value.getString("common");
                    // assumes every non-start record carries a "page" object — TODO confirm upstream guarantee
                    String pageId = value.getJSONObject("page").getString("page_id");
                    Long ts = value.getLong("ts");
                    //displays: flatten each exposure record, enriching it with common/page_id/ts
                    JSONArray displays = value.getJSONArray("displays");
                    if (displays != null && displays.size() > 0) {
                        for (int i = 0; i < displays.size(); i++) {
                            JSONObject displayDataJson = displays.getJSONObject(i);
                            displayDataJson.put("common", common);
                            displayDataJson.put("page_id", pageId);
                            displayDataJson.put("ts", ts);
                            context.output(displayTag, displayDataJson.toJSONString());
                        }
                    }
                    //actions: flatten likewise (action records carry their own ts)
                    JSONArray actions = value.getJSONArray("actions");
                    if (actions != null && actions.size() > 0) {
                        for (int i = 0; i < actions.size(); i++) {
                            JSONObject actionDataJson = actions.getJSONObject(i);
                            actionDataJson.put("common", common);
                            actionDataJson.put("page_id", pageId);
                            context.output(actionTag, actionDataJson.toJSONString());
                        }
                    }
                    //strip the already-flattened arrays before emitting the page record
                    value.remove("displays");
                    value.remove("actions");
                    collector.collect(value.toJSONString());
                }
            }
        });

        //todo 7.get side-output streams
        DataStream<String> startDS = pageDStream.getSideOutput(startTag);
        DataStream<String> displayDS = pageDStream.getSideOutput(displayTag);
        DataStream<String> actionDS = pageDStream.getSideOutput(actionTag);
        DataStream<String> errDS = pageDStream.getSideOutput(errorTag);

        //todo 8.write five DStream to kafka_topic
        pageDStream.print("Page>>>>");
        startDS.print("Start>>>>");
        displayDS.print("DisPlay>>>>");
        actionDS.print("Action>>>>");
        errDS.print("Err>>>");

        String page_topic = "dwd_traffic_page_log";
        String start_topic = "dwd_traffic_start_log";
        String display_topic = "dwd_traffic_display_log";
        String action_topic = "dwd_traffic_action_log";
        String error_topic = "dwd_traffic_error_log";

        pageDStream.addSink(ZhuKafkaUtil.getFlinkKafkaProducer(page_topic));
        startDS.addSink(ZhuKafkaUtil.getFlinkKafkaProducer(start_topic));
        displayDS.addSink(ZhuKafkaUtil.getFlinkKafkaProducer(display_topic));
        actionDS.addSink(ZhuKafkaUtil.getFlinkKafkaProducer(action_topic));
        errDS.addSink(ZhuKafkaUtil.getFlinkKafkaProducer(error_topic));

        //todo 9.execute
        streamExecutionEnvironment.execute("BaseLogApp");
    }

}

