package com.atguigu.edu.realtime.app.dws;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.edu.realtime.app.func.DimAsyncFunction;
import com.atguigu.edu.realtime.bean.CourseUserReviewBean;
import com.atguigu.edu.realtime.bean.PaperScoreSectionBean;
import com.atguigu.edu.realtime.bean.TrafficSessionBean;
import com.atguigu.edu.realtime.util.DateFormatUtil;
import com.atguigu.edu.realtime.util.MyClickhouseUtil;
import com.atguigu.edu.realtime.util.MyKafkaUtil;
import org.apache.commons.lang3.StringUtils;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.common.state.ListState;
import org.apache.flink.api.common.state.ListStateDescriptor;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.cep.CEP;
import org.apache.flink.cep.PatternSelectFunction;
import org.apache.flink.cep.PatternStream;
import org.apache.flink.cep.PatternTimeoutFunction;
import org.apache.flink.cep.pattern.Pattern;
import org.apache.flink.cep.pattern.conditions.SimpleCondition;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.*;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.KeyedProcessFunction;
import org.apache.flink.streaming.api.functions.windowing.ProcessAllWindowFunction;
import org.apache.flink.streaming.api.functions.windowing.ProcessWindowFunction;
import org.apache.flink.streaming.api.functions.windowing.WindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;

import java.time.Duration;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;

/**
 * DWS-layer traffic aggregation: per (source, visitor-type) dimension, counts
 * sessions, page views, unique visitors, bounce (jump) sessions, and total
 * page dwell time over 20-second tumbling event-time windows, then writes the
 * result to ClickHouse.
 *
 * @Author zhangsan
 * @Date 2022/10/20 22:12
 */
public class DwsTrafficScIsNewPageWindow {
    public static void main(String[] args) throws Exception {
        // TODO 1. Basic environment setup
        // 1.1 Stream execution environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // 1.2 Parallelism
        env.setParallelism(4);
        // TODO 2. Checkpoint configuration (omitted)
        // TODO 3. Read page-log data from the Kafka topic dwd_traffic_page_log
        // 3.1 Topic and consumer group
        String topic = "dwd_traffic_page_log";
        String groupId = "dws_traffic_page_session_group";
        // 3.2 Consumer
        FlinkKafkaConsumer<String> kafkaConsumer = MyKafkaUtil.getKafkaConsumer(topic, groupId);
        // 3.3 Wrap as a stream
        DataStreamSource<String> kafkaStrDS = env.addSource(kafkaConsumer);

        // TODO 4. Convert jsonStr -> JSONObject
        SingleOutputStreamOperator<JSONObject> jsonObjDS = kafkaStrDS.map(JSON::parseObject);

        // TODO 5. Assign watermarks and extract the event-time field ("ts", epoch millis)
        SingleOutputStreamOperator<JSONObject> withWatermarkDS = jsonObjDS.assignTimestampsAndWatermarks(
                WatermarkStrategy
                        .<JSONObject>forBoundedOutOfOrderness(Duration.ofSeconds(3))
                        .withTimestampAssigner(
                                new SerializableTimestampAssigner<JSONObject>() {
                                    @Override
                                    public long extractTimestamp(JSONObject jsonObj, long recordTimestamp) {
                                        // Avoid the String round-trip; "ts" is a numeric field.
                                        return jsonObj.getLong("ts");
                                    }
                                }
                        )
        );

        // TODO 6. Key by session id (sid) so each key holds exactly one session's pages
        KeyedStream<JSONObject, String> keyedDS = withWatermarkDS.keyBy(jsonObj -> jsonObj.getJSONObject("common").getString("sid"));

        // TODO 7. Stateful session accumulation.
        // Pages are buffered in ListState; a processing-time timer set 10s after each
        // element flushes the session once no new page arrives within that window.
        SingleOutputStreamOperator<TrafficSessionBean> processDS = keyedDS.process(
                new KeyedProcessFunction<String, JSONObject, TrafficSessionBean>() {
                    // All page events seen so far for the current session key.
                    private ListState<JSONObject> jsonObjListState;

                    @Override
                    public void open(Configuration parameters) throws Exception {
                        ListStateDescriptor<JSONObject> pageListStateDescriptor = new ListStateDescriptor<>("pageListState", JSONObject.class);
                        jsonObjListState = getRuntimeContext().getListState(pageListStateDescriptor);
                    }

                    @Override
                    public void processElement(JSONObject jsonObj, Context ctx, Collector<TrafficSessionBean> out) throws Exception {
                        jsonObjListState.add(jsonObj);
                        // Re-arm a flush timer 10s from now; the last one to fire emits the session.
                        ctx.timerService().registerProcessingTimeTimer(ctx.timerService().currentProcessingTime() + 10 * 1000L);
                    }

                    @Override
                    public void onTimer(long timestamp, OnTimerContext ctx, Collector<TrafficSessionBean> out) throws Exception {
                        List<JSONObject> pageList = new ArrayList<>();
                        for (JSONObject jsonObj : jsonObjListState.get()) {
                            pageList.add(jsonObj);
                        }
                        // A timer registered per element may fire after an earlier timer
                        // already flushed and cleared the state — nothing left to emit.
                        if (pageList.isEmpty()) {
                            return;
                        }

                        Long jumpSessionCnt = 0L;
                        Long duringTimeSum = 0L;
                        Long pageCnt = (long) pageList.size();
                        JSONObject firstPage = pageList.get(0);
                        if (pageList.size() == 1) {
                            // Single-page session counts as a bounce (jump).
                            jumpSessionCnt = 1L;
                        } else {
                            for (JSONObject js : pageList) {
                                // BUGFIX: sum each page's during_time (was reading the
                                // first element for every iteration).
                                duringTimeSum += js.getJSONObject("page").getLong("during_time");
                            }
                        }
                        String sc = firstPage.getJSONObject("common").getString("sc");
                        String isNew = firstPage.getJSONObject("common").getString("is_new");
                        String ts = firstPage.getString("ts");
                        TrafficSessionBean sessionBean = TrafficSessionBean.builder()
                                .sessionId(ctx.getCurrentKey())
                                .sourceId(sc)
                                .isNew(isNew)
                                .sessionCnt(1L)
                                .pageCnt(pageCnt)
                                .durTimeSum(duringTimeSum)
                                .jumpSessionCnt(jumpSessionCnt)
                                .uniqueUserCnt(0L)
                                .ts(Long.valueOf(ts))
                                .build();
                        out.collect(sessionBean);
                        // BUGFIX: clear state so later timers do not re-emit this
                        // session and state does not grow without bound.
                        jsonObjListState.clear();
                    }
                }
        );

        // TODO 8. Read unique-visitor detail from Kafka and map to the same bean shape
        String uvTopic = "dwd_traffic_unique_visitor_detail";
        FlinkKafkaConsumer<String> uvKafkaConsumer = MyKafkaUtil.getKafkaConsumer(uvTopic, groupId);
        DataStreamSource<String> uvSource = env.addSource(uvKafkaConsumer);
        SingleOutputStreamOperator<TrafficSessionBean> uvMappedStream =
                uvSource.map(jsonStr -> {
                    JSONObject jsonObj = JSON.parseObject(jsonStr);
                    JSONObject common = jsonObj.getJSONObject("common");
                    Long ts = jsonObj.getLong("ts");

                    // Dimension fields
                    String sc = common.getString("sc");
                    String isNew = common.getString("is_new");

                    // Each UV record contributes only to uniqueUserCnt; all other
                    // measures are zero so the union-then-reduce sums stay correct.
                    return TrafficSessionBean.builder()
                            .sourceId(sc)
                            .isNew(isNew)
                            .sessionCnt(0L)
                            .pageCnt(0L)
                            .durTimeSum(0L)
                            .jumpSessionCnt(0L)
                            .uniqueUserCnt(1L)
                            .ts(ts)
                            .build();
                });

        DataStream<TrafficSessionBean> pageDS = processDS.union(uvMappedStream);

        // TODO 9. Watermarks on the unioned stream
        SingleOutputStreamOperator<TrafficSessionBean> winDS = pageDS.assignTimestampsAndWatermarks(
                WatermarkStrategy.<TrafficSessionBean>forBoundedOutOfOrderness(Duration.ofSeconds(3))
                        .withTimestampAssigner(
                                new SerializableTimestampAssigner<TrafficSessionBean>() {
                                    @Override
                                    public long extractTimestamp(TrafficSessionBean trafficSessionBean, long l) {
                                        return trafficSessionBean.getTs();
                                    }
                                }
                        )
        );

        // TODO 10. Key by the aggregation dimensions (source, is_new).
        // BUGFIX: was keyed by sessionId, which the UV stream never sets (null key)
        // and which would defeat the per-dimension aggregation this DWS table needs.
        KeyedStream<TrafficSessionBean, Tuple2<String, String>> keyDS = winDS.keyBy(
                new KeySelector<TrafficSessionBean, Tuple2<String, String>>() {
                    @Override
                    public Tuple2<String, String> getKey(TrafficSessionBean sessionBean) throws Exception {
                        return Tuple2.of(sessionBean.getSourceId(), sessionBean.getIsNew());
                    }
                }
        );

        // TODO 11. 20-second tumbling event-time windows
        WindowedStream<TrafficSessionBean, Tuple2<String, String>, TimeWindow> windowDS = keyDS.window(TumblingEventTimeWindows.of(Time.seconds(20)));

        // TODO 12. Aggregate measures within each window, then stamp window bounds
        SingleOutputStreamOperator<TrafficSessionBean> reduceDS = windowDS.reduce(
                new ReduceFunction<TrafficSessionBean>() {
                    @Override
                    public TrafficSessionBean reduce(TrafficSessionBean value1, TrafficSessionBean value2) throws Exception {
                        value1.setSessionCnt(value1.getSessionCnt() + value2.getSessionCnt());
                        value1.setPageCnt(value1.getPageCnt() + value2.getPageCnt());
                        value1.setDurTimeSum(value1.getDurTimeSum() + value2.getDurTimeSum());
                        value1.setJumpSessionCnt(value1.getJumpSessionCnt() + value2.getJumpSessionCnt());
                        return value1;
                    }
                },
                new WindowFunction<TrafficSessionBean, TrafficSessionBean, Tuple2<String, String>, TimeWindow>() {
                    @Override
                    public void apply(Tuple2<String, String> key, TimeWindow window, Iterable<TrafficSessionBean> in, Collector<TrafficSessionBean> out) throws Exception {
                        String stt = DateFormatUtil.toYmdHms(window.getStart());
                        String edt = DateFormatUtil.toYmdHms(window.getEnd());
                        for (TrafficSessionBean sessionBean : in) {
                            sessionBean.setStt(stt);
                            sessionBean.setEdt(edt);
                            // ts becomes the emit time for downstream versioning.
                            sessionBean.setTs(System.currentTimeMillis());
                            out.collect(sessionBean);
                        }
                    }
                }
        );

        // TODO 13. Async-join the source dimension (dim_base_source) for sourceName
        SingleOutputStreamOperator<TrafficSessionBean> withSourceDS = AsyncDataStream.unorderedWait(
                reduceDS,
                new DimAsyncFunction<TrafficSessionBean>("dim_base_source") {
                    @Override
                    public void join(TrafficSessionBean sessionBean, JSONObject dimInfoJsonObj) {
                        sessionBean.setSourceName(dimInfoJsonObj.getString("source_site").toUpperCase());
                    }

                    @Override
                    public String getKey(TrafficSessionBean sessionBean) {
                        return sessionBean.getSourceId();
                    }
                }, 60 * 10, TimeUnit.SECONDS
        );

        // TODO 14. Sink the result to ClickHouse
        withSourceDS.print(">>>>>");
        withSourceDS.addSink(MyClickhouseUtil.getSinkFunction("insert into dws_traffic_sc_isNew_page_window values(?,?,?,?,?,?,?,?,?,?,?)"));
        env.execute();
    }
}
