package com.atguigu.gmall.realtime.app.dwd;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONAware;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.gmall.realtime.app.BaseAppV1;
import com.atguigu.gmall.realtime.common.Constant;
import com.atguigu.gmall.realtime.util.AtguiguUtil;
import com.atguigu.gmall.realtime.util.FlinkSinkUtil;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.streaming.api.functions.windowing.ProcessWindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;

import java.time.Duration;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;

/**
 * @Author lizhenchao@atguigu.cn
 * @Date 2021/8/24 9:51
 */
public class DwdLog extends BaseAppV1 {
    
    // Keys used in the stream map returned by splitStream(); also readable labels
    // for the three DWD log categories.
    private static final String START = "start";
    private static final String PAGE = "page";
    private static final String DISPLAY = "display";
    
    public static void main(String[] args) {
        new DwdLog().init(2001, 1, "DwdLog", "DwdLog", Constant.TOPIC_ODS_LOG);
    }
    
    /**
     * Job pipeline: consume raw ODS log records, correct the {@code is_new}
     * flag, split the stream into start / page / display sub-streams, and
     * write each one to its own Kafka DWD topic.
     *
     * @param env          the Flink execution environment (provided by BaseAppV1)
     * @param sourceStream raw JSON log lines read from the ODS Kafka topic
     */
    @Override
    protected void run(StreamExecutionEnvironment env,
                       DataStreamSource<String> sourceStream) {
        // 1. Distinguish new vs. returning visitors: correct the is_new field
        SingleOutputStreamOperator<JSONObject> validatedStream = distinguishOldOrNew(sourceStream);
        // 2. Split into start-log, page-log and display(exposure)-log streams
        HashMap<String, DataStream<JSONObject>> threeStreams = splitStream(validatedStream);
        // 3. Write each stream to its own Kafka topic
        write2Kafka(threeStreams);
    }
    
    /**
     * Serializes each sub-stream back to a JSON string and sinks it to the
     * matching DWD Kafka topic.
     *
     * @param streams map produced by {@link #splitStream}; must contain the
     *                START, PAGE and DISPLAY keys
     */
    private void write2Kafka(HashMap<String, DataStream<JSONObject>> streams) {
        streams
            .get(START)
            .map(JSONAware::toJSONString)
            .addSink(FlinkSinkUtil.getKafkaSink(Constant.TOPIC_DWD_START));
        
        streams
            .get(PAGE)
            .map(JSONAware::toJSONString)
            .addSink(FlinkSinkUtil.getKafkaSink(Constant.TOPIC_DWD_PAGE));
        
        streams
            .get(DISPLAY)
            .map(JSONAware::toJSONString)
            .addSink(FlinkSinkUtil.getKafkaSink(Constant.TOPIC_DWD_DISPLAY));
    }
    
    /**
     * Splits the validated log stream three ways: start logs stay on the main
     * stream, page logs and display (exposure) logs go to side outputs. Each
     * display element is enriched with the record's timestamp, its page_id
     * (when page info is present) and all {@code common} fields.
     *
     * @param validatedStream log records with a corrected is_new flag
     * @return map from stream label (START/PAGE/DISPLAY) to the sub-stream
     */
    private HashMap<String, DataStream<JSONObject>> splitStream(SingleOutputStreamOperator<JSONObject> validatedStream) {
        // Anonymous subclasses so the generic type survives erasure.
        OutputTag<JSONObject> pageTag = new OutputTag<JSONObject>("page") {};
        OutputTag<JSONObject> displayTag = new OutputTag<JSONObject>("display") {};
        
        SingleOutputStreamOperator<JSONObject> startStream = validatedStream.process(new ProcessFunction<JSONObject, JSONObject>() {
            @Override
            public void processElement(JSONObject value,
                                       Context ctx,
                                       Collector<JSONObject> out) throws Exception {
                // Start logs go to the main stream; page and display logs to side outputs.
                JSONObject start = value.getJSONObject("start");
                if (start != null) {
                    out.collect(value);
                } else {
                    // Page log?
                    JSONObject page = value.getJSONObject("page");
                    if (page != null) {
                        ctx.output(pageTag, value);
                    }
                    // Display (exposure) log? One record may carry many displays.
                    JSONArray displays = value.getJSONArray("displays");
                    if (displays != null) {
                        for (int i = 0; i < displays.size(); i++) {
                            JSONObject display = displays.getJSONObject(i);
                            // 1. Copy the record timestamp down to each display
                            display.put("ts", value.getLong("ts"));
                            // 2. Copy page_id down to each display.
                            //    BUGFIX: the original dereferenced
                            //    value.getJSONObject("page") unconditionally here and
                            //    would NPE on a record carrying displays but no page.
                            if (page != null) {
                                display.put("page_id", page.getString("page_id"));
                            }
                            // 3. Flatten all common fields into the display
                            display.putAll(value.getJSONObject("common"));
                            
                            ctx.output(displayTag, display);
                        }
                    }
                }
            }
        });
        
        HashMap<String, DataStream<JSONObject>> result = new HashMap<>();
        result.put(START, startStream);
        result.put(PAGE, startStream.getSideOutput(pageTag));
        result.put(DISPLAY, startStream.getSideOutput(displayTag));
        return result;
    }
    
    /**
     * Corrects the {@code is_new} flag per device (mid).
     * <p>
     * Approach: use event time + tumbling windows to tolerate out-of-order
     * events. For a given mid, only the earliest event of its FIRST window
     * keeps {@code is_new=1}; every other event — in that window and in all
     * later windows — is forced to {@code is_new=0}. "First window seen" is
     * tracked with a keyed ValueState.
     *
     * @param sourceStream raw JSON log lines
     * @return parsed records with a corrected is_new flag
     */
    private SingleOutputStreamOperator<JSONObject> distinguishOldOrNew(DataStreamSource<String> sourceStream) {
        return sourceStream
            .map(JSON::parseObject)
            .assignTimestampsAndWatermarks(
                WatermarkStrategy
                    .<JSONObject>forBoundedOutOfOrderness(Duration.ofSeconds(3))
                    .withTimestampAssigner((obj, ts) -> obj.getLong("ts"))
            )
            .keyBy(obj -> obj.getJSONObject("common").getString("mid"))
            .window(TumblingEventTimeWindows.of(Time.seconds(5)))
            .process(new ProcessWindowFunction<JSONObject, JSONObject, String, TimeWindow>() {
                
                // Keyed flag: non-null once the first window for this mid has fired.
                private ValueState<Boolean> firstWindowState;
                
                @Override
                public void open(Configuration parameters) throws Exception {
                    firstWindowState = getRuntimeContext()
                        .getState(new ValueStateDescriptor<Boolean>("firstWindowState", Boolean.class));
                }
                
                @Override
                public void process(String key,
                                    Context ctx,
                                    Iterable<JSONObject> elements,
                                    Collector<JSONObject> out) throws Exception {
                    // A null state means this is the first window for this mid:
                    // sort by timestamp, mark only the earliest event is_new=1.
                    if (firstWindowState.value() == null) {
                        firstWindowState.update(true);  // later windows take the else-branch
                        List<JSONObject> list = AtguiguUtil.toList(elements);
                        list.sort(Comparator.comparing(o -> o.getLong("ts")));
                        for (int i = 0; i < list.size(); i++) {
                            JSONObject obj = list.get(i);
                            // Only the very first event keeps is_new=1
                            obj.getJSONObject("common").put("is_new", i == 0 ? "1" : "0");
                            out.collect(obj);
                        }
                    } else {
                        // Every later window: the device is known, force is_new=0
                        for (JSONObject element : elements) {
                            element.getJSONObject("common").put("is_new", "0");
                            out.collect(element);
                        }
                    }
                }
            });
    }
}
/*
Consumes ods_log data, splits it into sub-streams, and writes each stream's
records to a different Kafka topic — this forms the DWD layer.
 */
