package com.atguigu.gmall.realtime.app.dwd;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import jdk.nashorn.internal.runtime.linker.Bootstrap;
import com.alibaba.fastjson.JSONObject;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;

import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Properties;

/**
 * DWD-layer log splitter.
 *
 * <p>Reads raw behavior logs from the Kafka topic {@code ods_base_log},
 * repairs the {@code is_new} flag per device ({@code mid}) using Flink keyed
 * state, then splits the stream three ways:
 * <ul>
 *   <li>start logs  → side output → topic {@code dwd_start_log}</li>
 *   <li>display (exposure) logs → side output → topic {@code dwd_display_log}</li>
 *   <li>everything else (page logs) → main output → topic {@code dwd_page_log}</li>
 * </ul>
 */
public class BaseLogApp1 {

    /**
     * Kafka broker list, shared by the consumer and all producers.
     * BUG FIX: the producers previously pointed at port 2181 (ZooKeeper);
     * they must use the broker port 9092, same as the consumer.
     */
    private static final String KAFKA_BROKERS = "hadoop100:9092";

    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        Properties properties = new Properties();
        properties.setProperty("bootstrap.servers", KAFKA_BROKERS);

        // Source: raw JSON log lines from the ODS topic.
        DataStreamSource<String> source = env.addSource(
                new FlinkKafkaConsumer<>("ods_base_log", new SimpleStringSchema(), properties));

        // Parse each raw line into a JSONObject.
        // NOTE(review): a malformed line will throw here and fail the task;
        // consider routing parse failures to a dirty-data side output.
        SingleOutputStreamOperator<JSONObject> jsonDS = source.map(new MapFunction<String, JSONObject>() {
            @Override
            public JSONObject map(String s) throws Exception {
                return JSON.parseObject(s);
            }
        });

        jsonDS.print("json>>>>>");

        // Key by device id so each mid gets its own first-visit-date state.
        KeyedStream<JSONObject, String> midDS = jsonDS.keyBy(
                data -> data.getJSONObject("common").getString("mid")
        );

        // Repair the "is_new" flag: a device claiming is_new=1 is only truly new
        // if we have never seen it on an earlier date.
        SingleOutputStreamOperator<JSONObject> jsonDSS = midDS.map(new RichMapFunction<JSONObject, JSONObject>() {

            /** First date (yyyyMMdd) this mid was ever seen; null until first visit. */
            private ValueState<String> firstVisitDateState;

            /** Per-task instance, so SimpleDateFormat's thread-unsafety is not an issue here. */
            private SimpleDateFormat sdf;

            @Override
            public void open(Configuration parameters) throws Exception {
                firstVisitDateState = getRuntimeContext().getState(
                        new ValueStateDescriptor<String>("newMidDateState", String.class)
                );
                sdf = new SimpleDateFormat("yyyyMMdd");
            }

            @Override
            public JSONObject map(JSONObject jsonObject) throws Exception {
                String isNew = jsonObject.getJSONObject("common").getString("is_new");

                // Event timestamp; assumed to be epoch milliseconds — TODO confirm upstream.
                Long ts = jsonObject.getLong("ts");

                if ("1".equals(isNew)) {
                    String value = firstVisitDateState.value();

                    String curDate = sdf.format(new Date(ts));

                    // BUG FIX: the original guard tested curDate.length() != 0,
                    // which is always true; the intended check is on the state value.
                    if (value != null && value.length() != 0) {
                        // Seen before: if it was on a different (earlier) date,
                        // this device is not actually new — downgrade the flag.
                        if (!value.equals(curDate)) {
                            isNew = "0";
                            jsonObject.getJSONObject("common").put("is_new", isNew);
                        }
                    } else {
                        // Genuinely first visit: remember the date.
                        firstVisitDateState.update(curDate);
                    }
                }
                return jsonObject;
            }
        });

        jsonDSS.print("过滤后>>>>>");

        // Side-output tags. The anonymous subclass ({}) is required so the
        // generic type String survives erasure.
        OutputTag<String> startTag = new OutputTag<String>("start") {};

        OutputTag<String> displayTag = new OutputTag<String>("display") {};

        // Split: start logs and display logs go to side outputs,
        // page logs stay on the main stream.
        SingleOutputStreamOperator<String> process = jsonDSS.process(
                new ProcessFunction<JSONObject, String>() {
                    @Override
                    public void processElement(JSONObject jsonObject, Context context, Collector<String> collector) throws Exception {
                        JSONObject startJson = jsonObject.getJSONObject("start");
                        String dataStr = jsonObject.toString();

                        if (startJson != null && startJson.size() > 0) {
                            // App-start log: side output only.
                            context.output(startTag, dataStr);
                        } else {
                            // Page log: main output.
                            collector.collect(dataStr);

                            JSONArray displays = jsonObject.getJSONArray("displays");

                            if (displays != null && displays.size() > 0) {
                                // Exposure log: emit each display item individually,
                                // enriched with the page id it was shown on.
                                String pageId = jsonObject.getJSONObject("page").getString("page_id");
                                for (int i = 0; i < displays.size(); i++) {
                                    JSONObject displaysJsonObj = displays.getJSONObject(i);
                                    displaysJsonObj.put("page_id", pageId);
                                    context.output(displayTag, displaysJsonObj.toString());
                                }
                            }
                        }
                    }
                }
        );

        DataStream<String> sideOutput = process.getSideOutput(startTag);
        DataStream<String> sideOutput1 = process.getSideOutput(displayTag);

        process.print();
        sideOutput.print();
        sideOutput1.print();

        // Sinks — one DWD topic per log type (broker list fixed to port 9092).
        process.addSink(new FlinkKafkaProducer<String>(KAFKA_BROKERS, "dwd_page_log", new SimpleStringSchema()));
        sideOutput.addSink(new FlinkKafkaProducer<String>(KAFKA_BROKERS, "dwd_start_log", new SimpleStringSchema()));
        sideOutput1.addSink(new FlinkKafkaProducer<String>(KAFKA_BROKERS, "dwd_display_log", new SimpleStringSchema()));

        env.execute();
    }
}
