package com.wsjj.gmall;

import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.wsjj.gmall.base.BaseApp;
import com.wsjj.gmall.constant.Constant;
import com.wsjj.gmall.util.DateFormatUtil;
import com.wsjj.gmall.util.FlinkSinkUtil;
import org.apache.commons.lang3.StringUtils;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.connector.kafka.sink.KafkaSink;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SideOutputDataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;

//日志流事实处理
/**
 * DWD base-log application.
 *
 * <p>Pipeline: read raw log lines from Kafka {@code topic_log} → ETL (valid JSON
 * continues, anything else goes to a dirty-data side output/topic) → repair the
 * {@code is_new} new/returning-visitor flag per device id → split the stream into
 * error / start / display / action / page sub-streams → write each sub-stream to
 * its own Kafka topic.
 */
public class DwdBaseLog extends BaseApp {

    public static void main(String[] args) {
        // port 10011, parallelism 4, source topic "topic_log", consumer group "dwdbaselog"
        new DwdBaseLog().start(10011, 4, "topic_log", "dwdbaselog");
    }

    @Override
    public void handle(StreamExecutionEnvironment env, DataStreamSource<String> stream) {
        // TODO 1. ETL: parse each record; valid JSON flows downstream, non-JSON
        // (dirty data) is diverted to a side output.
        // NOTE: the anonymous-subclass braces {} are required — without them the
        // generic type argument is lost to erasure and Flink throws at runtime.
        OutputTag<String> dwdbaselog = new OutputTag<String>("dwdbaselog") {};

        // Alternative without {}:
        // OutputTag<String> tag = new OutputTag<>("tag", TypeInformation.of(String.class));

        SingleOutputStreamOperator<JSONObject> process = etl(stream, dwdbaselog);

        // TODO 2. Repair the is_new flag.
        // A device that visited before but cleared its local storage reports
        // is_new=1 again; we keep the first visit date in keyed state and
        // downgrade such records back to is_new=0.
        SingleOutputStreamOperator<JSONObject> map = fixNewAndOld(process);

        map.print("新老用户修复：");

        // TODO 3. Split the stream: error / start / display / action records go to
        // side outputs; page records stay on the main stream.
        OutputTag<String> errPutTag = new OutputTag<String>("errPutTag") {};
        OutputTag<String> startPutTag = new OutputTag<String>("startPutTag") {};
        OutputTag<String> displayTag = new OutputTag<String>("displayTag") {};
        OutputTag<String> actionTag = new OutputTag<String>("actionTag") {};

        SingleOutputStreamOperator<String> processDS = splitDS(map, errPutTag, startPutTag, displayTag, actionTag);

        // Write each sub-stream to its Kafka topic.
        toKafka(processDS, errPutTag, startPutTag, displayTag, actionTag);
    }

    /**
     * Sinks the main (page) stream and every side output to its dedicated Kafka topic.
     * The {@code print(...)} calls are kept for local debugging visibility.
     */
    private static void toKafka(SingleOutputStreamOperator<String> processDS, OutputTag<String> errPutTag, OutputTag<String> startPutTag, OutputTag<String> displayTag, OutputTag<String> actionTag) {
        SideOutputDataStream<String> erroutput = processDS.getSideOutput(errPutTag);
        erroutput.print("err:");
        KafkaSink<String> topicDwdTrafficErr = FlinkSinkUtil.getKafkaSink(Constant.TOPIC_DWD_TRAFFIC_ERR, "topic_dwd_traffic_err");
        erroutput.sinkTo(topicDwdTrafficErr);

        SideOutputDataStream<String> startoutput = processDS.getSideOutput(startPutTag);
        startoutput.print("start");
        KafkaSink<String> topicDwdTrafficStart = FlinkSinkUtil.getKafkaSink(Constant.TOPIC_DWD_TRAFFIC_START, "topic_dwd_traffic_start");
        startoutput.sinkTo(topicDwdTrafficStart);

        SideOutputDataStream<String> displayoutput = processDS.getSideOutput(displayTag);
        displayoutput.print("display");
        KafkaSink<String> topicDwdTrafficDisplay = FlinkSinkUtil.getKafkaSink(Constant.TOPIC_DWD_TRAFFIC_DISPLAY, "topic_dwd_traffic_display");
        displayoutput.sinkTo(topicDwdTrafficDisplay);

        SideOutputDataStream<String> actionoutput = processDS.getSideOutput(actionTag);
        actionoutput.print("action");
        KafkaSink<String> topicDwdTrafficAction = FlinkSinkUtil.getKafkaSink(Constant.TOPIC_DWD_TRAFFIC_ACTION, "topic_dwd_traffic_action");
        actionoutput.sinkTo(topicDwdTrafficAction);

        processDS.print();
        KafkaSink<String> topicDwdTrafficPage = FlinkSinkUtil.getKafkaSink(Constant.TOPIC_DWD_TRAFFIC_PAGE, "topic_dwd_traffic_page");
        processDS.sinkTo(topicDwdTrafficPage);
    }

    /**
     * Splits one log record into up to several sub-streams.
     *
     * <p>Routing rules:
     * <ul>
     *   <li>records with an {@code err} field: a full copy goes to the error side
     *       output, then {@code err} is stripped from the record;</li>
     *   <li>start logs go to the start side output (a start log is never also a
     *       page/display/action log);</li>
     *   <li>each element of {@code displays} / {@code actions} is enriched with
     *       the record's {@code common}, {@code page} and {@code ts} fields and
     *       emitted individually to its side output;</li>
     *   <li>only records that actually carry a {@code page} field are emitted on
     *       the main stream (the page topic). The previous version collected
     *       every record unconditionally, leaking start logs into the page topic.</li>
     * </ul>
     */
    private static SingleOutputStreamOperator<String> splitDS(SingleOutputStreamOperator<JSONObject> map, OutputTag<String> errPutTag, OutputTag<String> startPutTag, OutputTag<String> displayTag, OutputTag<String> actionTag) {
        SingleOutputStreamOperator<String> processDS = map.process(new ProcessFunction<JSONObject, String>() {
            @Override
            public void processElement(JSONObject value, ProcessFunction<JSONObject, String>.Context ctx, Collector<String> out) throws Exception {
                // Error log: side-output a copy, then drop the err field so it
                // does not pollute the remaining sub-streams.
                JSONObject err = value.getJSONObject("err");
                if (err != null) {
                    ctx.output(errPutTag, value.toJSONString());
                    value.remove("err");
                }

                JSONObject start = value.getJSONObject("start");
                if (start != null) {
                    // Start log — by definition not a page/display/action log.
                    ctx.output(startPutTag, value.toJSONString());
                } else {
                    JSONObject common = value.getJSONObject("common");
                    JSONObject page = value.getJSONObject("page");
                    String ts = value.getString("ts");

                    // Explode the displays array: one enriched record per element.
                    JSONArray displays = value.getJSONArray("displays");
                    if (displays != null && displays.size() > 0) {
                        for (int i = 0; i < displays.size(); i++) {
                            JSONObject display = displays.getJSONObject(i);
                            display.put("common", common);
                            display.put("page", page);
                            display.put("ts", ts);
                            ctx.output(displayTag, display.toJSONString());
                        }
                        // Remove once, after the loop (was redundantly removed per element).
                        value.remove("displays");
                    }

                    // Explode the actions array the same way.
                    JSONArray actions = value.getJSONArray("actions");
                    if (actions != null && actions.size() > 0) {
                        for (int i = 0; i < actions.size(); i++) {
                            JSONObject action = actions.getJSONObject(i);
                            action.put("common", common);
                            action.put("page", page);
                            action.put("ts", ts);
                            ctx.output(actionTag, action.toJSONString());
                        }
                        value.remove("actions");
                    }

                    // Main stream = page topic: only emit genuine page logs.
                    if (page != null) {
                        out.collect(value.toJSONString());
                    }
                }
            }
        });
        return processDS;
    }

    /**
     * Repairs the {@code is_new} flag using per-device (mid) keyed state that
     * stores the device's first visit date (yyyy-MM-dd).
     *
     * <p>Rules:
     * <ul>
     *   <li>{@code is_new == 1} and no state yet: genuinely new — remember today;</li>
     *   <li>{@code is_new == 1} but state holds an earlier date: the device cleared
     *       its local storage — downgrade to {@code is_new = "0"};</li>
     *   <li>{@code is_new == 1} and state holds today: same-day repeat visit of a
     *       new user — leave the flag as-is;</li>
     *   <li>{@code is_new == 0} with empty state (e.g. state lost): backfill the
     *       state with "yesterday" so the device is never mistaken for new.</li>
     * </ul>
     */
    private static SingleOutputStreamOperator<JSONObject> fixNewAndOld(SingleOutputStreamOperator<JSONObject> process) {
        // Key by device id so each device gets its own first-visit-date state.
        KeyedStream<JSONObject, String> keyedStream = process.keyBy(new KeySelector<JSONObject, String>() {
            @Override
            public String getKey(JSONObject value) throws Exception {
                JSONObject common = value.getJSONObject("common");
                return common.getString("mid");
            }
        });

        SingleOutputStreamOperator<JSONObject> map = keyedStream.map(new RichMapFunction<JSONObject, JSONObject>() {

            // First visit date (yyyy-MM-dd) of the current device.
            private ValueState<String> firstVisitDateState;

            @Override
            public void open(Configuration parameters) throws Exception {
                // Descriptor name kept from the original for state compatibility.
                ValueStateDescriptor<String> descriptor = new ValueStateDescriptor<>("stringStream", String.class);
                firstVisitDateState = getRuntimeContext().getState(descriptor);
            }

            @Override
            public JSONObject map(JSONObject value) throws Exception {
                JSONObject common = value.getJSONObject("common");
                Integer isNew = common.getInteger("is_new");
                Long ts = value.getLong("ts");
                String today = DateFormatUtil.tsToDate(ts);
                String firstVisitDate = firstVisitDateState.value();

                // Null-safe comparison: a missing is_new field is treated as old.
                if (isNew != null && isNew == 1) {
                    if (StringUtils.isEmpty(firstVisitDate)) {
                        // First time we see this device: record today's date.
                        firstVisitDateState.update(today);
                    } else if (!firstVisitDate.equals(today)) {
                        // Claims to be new but was first seen on an earlier day:
                        // the device storage was cleared — downgrade to old.
                        common.put("is_new", "0");
                    }
                    // else: same-day repeat visit of a new user — keep is_new = 1.
                } else if (StringUtils.isEmpty(firstVisitDate)) {
                    // Old visitor with no state (e.g. state lost): backfill with
                    // "yesterday" so future is_new=1 records are downgraded.
                    firstVisitDateState.update(DateFormatUtil.tsToDate(ts - 24 * 60 * 60 * 1000L));
                }

                return value;
            }
        });
        return map;
    }

    /**
     * ETL step: parses each raw line as JSON. Parseable records continue on the
     * main stream; unparseable ones (dirty data) go to the given side output and
     * are sunk to the {@code dirty_data} Kafka topic.
     */
    private static SingleOutputStreamOperator<JSONObject> etl(DataStreamSource<String> stream, OutputTag<String> dwdbaselog) {
        SingleOutputStreamOperator<JSONObject> process = stream.process(new ProcessFunction<String, JSONObject>() {
            @Override
            public void processElement(String value, ProcessFunction<String, JSONObject>.Context ctx, Collector<JSONObject> out) throws Exception {
                try {
                    JSONObject jsonObject = JSONObject.parseObject(value);
                    out.collect(jsonObject);
                } catch (Exception e) {
                    // Not valid JSON: divert to the dirty-data side output.
                    ctx.output(dwdbaselog, value);
                }
            }
        });

        process.print("正常数据：");
        SideOutputDataStream<String> sideOutput = process.getSideOutput(dwdbaselog);
        sideOutput.print("脏数据：");

        // Persist dirty records to Kafka for later inspection.
        KafkaSink<String> kafkaSink = FlinkSinkUtil.getKafkaSink("dirty_data", "dwd_base_log_");
        sideOutput.sinkTo(kafkaSink);
        return process;
    }

}
