package com.lsx143.realtime.app.dwd;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.lsx143.realtime.app.BaseApp;
import com.lsx143.realtime.common.Constants;
import com.lsx143.realtime.util.KafkaUtil;
import com.lsx143.realtime.util.MyUtil;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.streaming.api.functions.windowing.ProcessWindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;

import java.time.Duration;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;

/**
 * ODS -&gt; DWD log-data module.
 *
 * <p>Reads raw log records from the ODS Kafka topic, tags each record as a
 * new or returning visitor ({@code common.is_new}), splits the stream into
 * start / display / page logs via side outputs, and writes each sub-stream
 * to its own DWD Kafka topic.
 */
public class DWDLogApp extends BaseApp {
    /**
     * JSON field names, also used as keys of the split-stream map.
     */
    private static final String START = "start";
    private static final String DISPLAY = "displays";
    private static final String PAGE = "page";

    public static void main(String[] args) {
        // Instantiate the app; init() builds the environment and invokes run()
        // with a consumer on the ODS log topic.
        new DWDLogApp().init(10001,
                "DWDLogApp",
                1,
                "DWDLogApp",
                Constants.TOPIC_ODS_LOG);
    }

    /**
     * Business logic pipeline.
     *
     * @param env       Flink execution environment
     * @param srcStream raw log stream consumed from Kafka
     */
    @Override
    protected void run(StreamExecutionEnvironment env, DataStreamSource<String> srcStream) {
        System.out.println("【DWDLogApp】 模块启动");
        // 1. Tag every record with is_new (new vs. returning visitor).
        SingleOutputStreamOperator<JSONObject> distinguishStream = distinguishNewOrOld(srcStream);
        // 2. Split into start / display / page streams via side outputs.
        HashMap<String, DataStream<String>> streamMap = splitStream(distinguishStream);
        // 3. Write each sub-stream to its own DWD topic.
        sendToKafka(streamMap);
    }


    /**
     * Distinguishes new from returning visitors per device id (mid).
     *
     * <p>The record with the smallest {@code ts} in a device's first window is
     * marked {@code is_new = 1}; its timestamp is stored in keyed state so that
     * every later record for the same mid is marked {@code is_new = 0}.
     *
     * @param srcStream raw log stream from Kafka
     * @return stream of parsed JSON records with {@code common.is_new} set
     */
    private SingleOutputStreamOperator<JSONObject> distinguishNewOrOld(DataStreamSource<String> srcStream) {
        return srcStream
                //1. Parse each record into a JSON object.
                .map(JSON::parseObject)
                //2. Event-time watermarks, tolerating 5s of out-of-orderness.
                .assignTimestampsAndWatermarks(
                        WatermarkStrategy
                                .<JSONObject>forBoundedOutOfOrderness(Duration.ofSeconds(5))
                                .withTimestampAssigner((jsonObject, recordTimestamp) -> jsonObject.getLong("ts"))
                )
                //3. Key by device id (mid).
                .keyBy(jsonData -> jsonData.getJSONObject("common").getString("mid"))
                //4. 5-second tumbling event-time windows per device.
                .window(TumblingEventTimeWindows.of(Time.seconds(5L)))
                .process(new ProcessWindowFunction<JSONObject, JSONObject, String, TimeWindow>() {
                    // Keyed state: timestamp of this mid's first-ever record.
                    private ValueState<Long> firstTimeState;

                    @Override
                    public void open(Configuration parameters) {
                        firstTimeState = getRuntimeContext()
                                .getState(new ValueStateDescriptor<>("firstTimeState", Long.class));
                    }

                    @Override
                    public void process(String key,
                                        Context ctx,
                                        Iterable<JSONObject> iterable,
                                        Collector<JSONObject> collector) throws Exception {
                        List<JSONObject> jsonObjects = MyUtil.toList(iterable);
                        // BUG FIX: test the stored VALUE, not the state handle.
                        // The handle is assigned in open() and is never null here,
                        // so the original `firstTimeState == null` check always fell
                        // through to the "old user" branch: no record was ever marked
                        // new and the state was never updated.
                        if (firstTimeState.value() == null) {
                            //1. First window for this mid: sort by ts ascending.
                            jsonObjects.sort(Comparator.comparingLong(o -> o.getLong("ts")));
                            for (int i = 0; i < jsonObjects.size(); i++) {
                                if (i == 0) {
                                    //2. Earliest record is the first visit -> new user.
                                    jsonObjects.get(i).getJSONObject("common").put("is_new", 1);
                                    firstTimeState.update(jsonObjects.get(i).getLong("ts"));
                                } else {
                                    //3. Remaining records in the window -> old user.
                                    jsonObjects.get(i).getJSONObject("common").put("is_new", 0);
                                }
                                //Emit the tagged record.
                                collector.collect(jsonObjects.get(i));
                            }
                        } else {
                            //This mid was seen before: everything is an old user.
                            for (JSONObject jsonObject : jsonObjects) {
                                jsonObject.getJSONObject("common").put("is_new", 0);
                                collector.collect(jsonObject);
                            }
                        }
                    }
                });
    }

    /**
     * Splits the tagged stream into start / display / page sub-streams.
     *
     * <p>Start logs go to a side output; page logs stay on the main output;
     * each display item is flattened into its own record (enriched with the
     * record-level ts / common / page context) and sent to a second side output.
     *
     * @param distinguishStream stream tagged by {@link #distinguishNewOrOld}
     * @return map from {@link #START}/{@link #DISPLAY}/{@link #PAGE} to the sub-streams
     */
    private HashMap<String, DataStream<String>> splitStream(SingleOutputStreamOperator<JSONObject> distinguishStream) {
        //1. Side-output tags (anonymous subclasses preserve the generic type).
        OutputTag<String> startTag = new OutputTag<String>("startStream") {
        };
        OutputTag<String> displayTag = new OutputTag<String>("displayStream") {
        };
        //2. Route each record by which top-level fields it carries.
        SingleOutputStreamOperator<String> resultStream = distinguishStream.process(new ProcessFunction<JSONObject, String>() {
            @Override
            public void processElement(JSONObject value,
                                       Context ctx,
                                       Collector<String> out) {
                JSONObject start = value.getJSONObject(START);
                if (start != null) {
                    //1. A "start" field marks a startup log -> side output.
                    System.out.println("startTag=" + value.toJSONString());
                    ctx.output(startTag, value.toJSONString());
                } else {
                    System.out.println(value.toJSONString());
                    //2. Otherwise this is a page and/or display log.
                    // Page log -> main output.
                    JSONObject page = value.getJSONObject(PAGE);
                    if (page != null) {
                        out.collect(value.toJSONString());
                    }
                    JSONArray displays = value.getJSONArray(DISPLAY);
                    if (displays != null) {
                        //Display (exposure) log: emit one record per display item,
                        //carrying over the record-level ts / common / page context.
                        Long ts = value.getLong("ts");
                        JSONObject common = value.getJSONObject("common");
                        for (int i = 0; i < displays.size(); i++) {
                            JSONObject item = displays.getJSONObject(i);
                            item.put("ts", ts);
                            item.put("common", common);
                            item.put(PAGE, page);
                            ctx.output(displayTag, item.toJSONString());
                        }
                    }
                }
            }
        });
        //3. Gather the side outputs and the main (page) output.
        DataStream<String> startStream = resultStream.getSideOutput(startTag);
        DataStream<String> displayStream = resultStream.getSideOutput(displayTag);
        HashMap<String, DataStream<String>> streamMap = new HashMap<>();
        streamMap.put(START, startStream);
        streamMap.put(DISPLAY, displayStream);
        streamMap.put(PAGE, resultStream);
        return streamMap;
    }

    /**
     * Sinks each split stream to its DWD Kafka topic.
     *
     * @param streamMap split streams keyed by {@link #START}/{@link #DISPLAY}/{@link #PAGE}
     */
    private void sendToKafka(HashMap<String, DataStream<String>> streamMap) {
        //Attach one Kafka producer sink per sub-stream.
        DataStream<String> start = streamMap.get(START);
        DataStream<String> display = streamMap.get(DISPLAY);
        DataStream<String> page = streamMap.get(PAGE);
        start.addSink(KafkaUtil.getKafkaSink(Constants.TOPIC_DWD_START_LOG));
        display.addSink(KafkaUtil.getKafkaSink(Constants.TOPIC_DWD_DISPLAY_LOG));
        page.addSink(KafkaUtil.getKafkaSink(Constants.TOPIC_DWD_PAGE_LOG));
    }
}
