package com.nepu.gmall.realtime.app.dws;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.nepu.gmall.realtime.bean.TrafficPageViewBean;
import com.nepu.gmall.realtime.util.ClickHouseUtil;
import com.nepu.gmall.realtime.util.DateFormatUtil;
import com.nepu.gmall.realtime.util.KafkaUtils;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.streaming.api.datastream.*;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.windowing.WindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;
import scala.Tuple4;

import java.time.Duration;

/**
 * 本类的作用是：流量域版本-渠道-地区-访客类别粒度页面浏览各窗口汇总表
 * 首先我们可以看到这个需求是需要四个维度，对应的度量值为本节汇总表中需要有会话数、页面浏览数、浏览总时长、
 * 独立访客数、跳出会话数五个度量字段。
 * 对于会话数、页面浏览数、浏览总时长这三个度量值是可以直接从dwd_traffic_page_log这个topic中求出来的
 *          一个会话的条件是last_page_id == null
 *          页面浏览数：    每来一条数据，页面浏览数+1
 *          浏览总时长：    during_time
 * 对于独立访客数，我们之前在dwd层DwdTrafficUniqueVisitorDetail做过这个需求，只需要从dwd_traffic_unique_visitor_detail
 *          这个topic中取数据，每一条数据表示的是一个独立的访客
 * 对于跳出会话数，我们之前在dwd层DwdTrafficUserJumpDetail做过这个需求，只需要消费dwd_traffic_user_jump_detail
 *          这个topic中取数据，每一条数据表示的是一个跳出会话
 *
 * 因为当前我们采用的数据处理方式是DataStream的方式进行处理，DS对于流的join比较的麻烦，所以我们这里对数据不采用join方式
 * 而是将这三条流进行union，之后对数据进行聚合操作
 *
 * 数据流的分析：
 *                                                                                                        --> DwdTrafficUniqueVisitorDetail.class \
 * mock --> 日志服务器 --> f1.sh --> kafka(topic_log) --> BaseLogAPP.class --> kafka(dwd_traffic_page_log) --> DwsTrafficVcChArIsNewPageViewWindow.class --> clickhouse
 *                                                                                                        --> DwdTrafficUserJumpDetail.class      /
 * @author chenshuaijun
 * @create 2023-03-01 10:29
 */
public class DwsTrafficVcChArIsNewPageViewWindow {

    /**
     * Builds a {@link TrafficPageViewBean} carrying the four grouping dimensions
     * (vc / ch / ar / is_new) parsed from the "common" section of the log record,
     * together with the supplied measure values. Window start/end (stt/edt) are
     * left blank here; they are filled in by the window function after aggregation.
     *
     * @param jsonObject the parsed log record; must contain a "common" object and a "ts" field
     * @param uvCt       unique-visitor count contribution of this record
     * @param svCt       session count contribution of this record
     * @param pvCt       page-view count contribution of this record
     * @param durSum     page view duration contribution of this record (may be null if absent upstream)
     * @param ujCt       user-jump (bounce) count contribution of this record
     * @return a bean ready to be unioned and window-aggregated
     */
    private static TrafficPageViewBean buildBean(JSONObject jsonObject, Long uvCt, Long svCt,
                                                 Long pvCt, Long durSum, Long ujCt) {
        JSONObject common = jsonObject.getJSONObject("common");
        return new TrafficPageViewBean(
                "",
                "",
                common.getString("vc"),
                common.getString("ch"),
                common.getString("ar"),
                common.getString("is_new"),
                uvCt, svCt, pvCt, durSum, ujCt,
                jsonObject.getLong("ts"));
    }

    /**
     * Entry point: builds and submits the Flink streaming job that aggregates
     * traffic measures per (vc, ch, ar, is_new) over 10-second event-time windows
     * and writes the result to ClickHouse.
     */
    public static void main(String[] args) throws Exception {

        // TODO 1. Create the execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // In production the parallelism would never be 1; it should match the partition
        // count of the Kafka topics consumed below. 1 is used here for local development.
        env.setParallelism(1);

        // Checkpointing is disabled for local development. When enabled, the settings below
        // configure: a 5-minute exactly-once checkpoint interval, a 10-minute checkpoint
        // timeout, externalized checkpoints retained on cancellation (so a cancelled/failed
        // job can be restored from its last checkpoint), a failure-rate restart strategy
        // (10 failures per day, 3-minute delay), a 3-second minimum pause between
        // checkpoints, an in-memory HashMap state backend, and HDFS checkpoint storage.
        /*env.enableCheckpointing(5 * 60000L, CheckpointingMode.EXACTLY_ONCE);
        env.getCheckpointConfig().setCheckpointTimeout(10 * 60000L);
        env.getCheckpointConfig().enableExternalizedCheckpoints(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);
        env.setRestartStrategy(RestartStrategies.failureRateRestart(10, Time.of(1L, TimeUnit.DAYS), Time.of(3L, TimeUnit.MINUTES)));
        env.getCheckpointConfig().setMinPauseBetweenCheckpoints(3000L);
        env.setStateBackend(new HashMapStateBackend());
        env.getCheckpointConfig().setCheckpointStorage("hdfs://hadoop102:8020/checkpoint");
        // Only the atguigu user may operate on this HDFS cluster.
        System.setProperty("HADOOP_USER_NAME", "atguigu");*/

        // TODO 2. Consume the three DWD topics (same consumer group for all three).
        String pageViewTopic = "dwd_traffic_page_log";
        String uniqueVisitorTopic = "dwd_traffic_unique_visitor_detail";
        String userJumpTopic = "dwd_traffic_user_jump_detail";
        DataStreamSource<String> uniqueVisitorDS = env.addSource(KafkaUtils.getKafkaConsumer(uniqueVisitorTopic, "DwsTrafficVcChArIsNewPageViewWindow"));
        DataStreamSource<String> userJumpDS = env.addSource(KafkaUtils.getKafkaConsumer(userJumpTopic, "DwsTrafficVcChArIsNewPageViewWindow"));
        DataStreamSource<String> pageViewDS = env.addSource(KafkaUtils.getKafkaConsumer(pageViewTopic, "DwsTrafficVcChArIsNewPageViewWindow"));

        // TODO 3. Map each source into the common TrafficPageViewBean shape, setting
        //         exactly the measure(s) this stream contributes.

        // Each record of the unique-visitor topic represents one unique visitor -> uvCt = 1.
        SingleOutputStreamOperator<TrafficPageViewBean> uniqueVisitorMapDS = uniqueVisitorDS.map(new MapFunction<String, TrafficPageViewBean>() {
            @Override
            public TrafficPageViewBean map(String value) throws Exception {
                return buildBean(JSON.parseObject(value), 1L, 0L, 0L, 0L, 0L);
            }
        });

        // Each record of the user-jump topic represents one bounced session -> ujCt = 1.
        SingleOutputStreamOperator<TrafficPageViewBean> userJumpMapDS = userJumpDS.map(new MapFunction<String, TrafficPageViewBean>() {
            @Override
            public TrafficPageViewBean map(String value) throws Exception {
                return buildBean(JSON.parseObject(value), 0L, 0L, 0L, 0L, 1L);
            }
        });

        // Each page log is one page view (pvCt = 1) contributing during_time to durSum;
        // a record with no last_page_id marks the start of a new session -> svCt = 1.
        SingleOutputStreamOperator<TrafficPageViewBean> pageViewMapDS = pageViewDS.map(new MapFunction<String, TrafficPageViewBean>() {
            @Override
            public TrafficPageViewBean map(String value) throws Exception {
                JSONObject jsonObject = JSON.parseObject(value);
                JSONObject page = jsonObject.getJSONObject("page");
                long svCt = page.getString("last_page_id") == null ? 1L : 0L;
                return buildBean(jsonObject, 0L, svCt, 1L, page.getLong("during_time"), 0L);
            }
        });

        // TODO 4. Union the three streams (a join would be awkward in the DataStream API,
        //         and the beans are additive, so union + aggregate is sufficient).
        DataStream<TrafficPageViewBean> unionStream = uniqueVisitorMapDS.union(userJumpMapDS, pageViewMapDS);

        // TODO 5. Assign event time and watermarks. The upstream user-jump detection uses a
        //         10 s session window with a 2 s watermark delay, so its output can lag event
        //         time by up to 12 s; adding our own 2 s out-of-orderness allowance on top
        //         gives the 14 s bound used here.
        SingleOutputStreamOperator<TrafficPageViewBean> watermarkStream = unionStream.assignTimestampsAndWatermarks(WatermarkStrategy.<TrafficPageViewBean>forBoundedOutOfOrderness(Duration.ofSeconds(14)).withTimestampAssigner(new SerializableTimestampAssigner<TrafficPageViewBean>() {
            @Override
            public long extractTimestamp(TrafficPageViewBean element, long recordTimestamp) {
                return element.getTs();
            }
        }));

        // TODO 6. Key by the four dimensions: version, area, channel, visitor category.
        // NOTE(review): scala.Tuple4 is used as the key type here (likely an IDE auto-import);
        // org.apache.flink.api.java.tuple.Tuple4 would let Flink use its native tuple
        // serializer instead of falling back to Kryo. Grouping behavior is the same either
        // way (scala.Tuple4 has value-based equals/hashCode) — worth switching when convenient.
        KeyedStream<TrafficPageViewBean, Tuple4<String, String, String, String>> keyedStream = watermarkStream.keyBy(new KeySelector<TrafficPageViewBean, Tuple4<String, String, String, String>>() {
            @Override
            public Tuple4<String, String, String, String> getKey(TrafficPageViewBean value) throws Exception {
                return Tuple4.apply(value.getVc(), value.getAr(), value.getCh(), value.getIsNew());
            }
        });

        // TODO 7. 10-second tumbling event-time windows.
        WindowedStream<TrafficPageViewBean, Tuple4<String, String, String, String>, TimeWindow> windowStream = keyedStream.window(TumblingEventTimeWindows.of(Time.seconds(10)));

        // TODO 8. Aggregate. We need the window metadata (start/end) which only a window
        //         function can see, but a pure window function buffers all elements; so we
        //         combine an incremental reduce (low latency, keeps one element per key)
        //         with a window function that stamps the window bounds onto that element.
        SingleOutputStreamOperator<TrafficPageViewBean> reduceDataStream = windowStream.reduce(new ReduceFunction<TrafficPageViewBean>() {
            @Override
            public TrafficPageViewBean reduce(TrafficPageViewBean value1, TrafficPageViewBean value2) throws Exception {
                // Sum every measure pairwise; dimensions are identical within a key.
                value1.setUvCt(value1.getUvCt() + value2.getUvCt());
                value1.setSvCt(value1.getSvCt() + value2.getSvCt());
                value1.setPvCt(value1.getPvCt() + value2.getPvCt());
                value1.setUjCt(value1.getUjCt() + value2.getUjCt());
                value1.setDurSum(value1.getDurSum() + value2.getDurSum());
                return value1;
            }
        }, new WindowFunction<TrafficPageViewBean, TrafficPageViewBean, Tuple4<String, String, String, String>, TimeWindow>() {
            @Override
            public void apply(Tuple4<String, String, String, String> key, TimeWindow window, Iterable<TrafficPageViewBean> input, Collector<TrafficPageViewBean> out) throws Exception {
                // After the incremental reduce, the iterable holds exactly one pre-aggregated bean.
                TrafficPageViewBean reduceResultBean = input.iterator().next();
                reduceResultBean.setStt(DateFormatUtil.toYmdHms(window.getStart()));
                reduceResultBean.setEdt(DateFormatUtil.toYmdHms(window.getEnd()));
                // Overwrite ts with the emission time (used downstream as a version marker).
                reduceResultBean.setTs(System.currentTimeMillis());
                out.collect(reduceResultBean);
            }
        });

        // TODO 9. Write the result to ClickHouse (print is kept for local debugging).
        reduceDataStream.print(">>>>>>");

        reduceDataStream.addSink(ClickHouseUtil.getJdbcSink("insert into dws_traffic_vc_ch_ar_is_new_page_view_window values(?,?,?,?,?,?,?,?,?,?,?,?)"));

        env.execute("DwsTrafficVcChArIsNewPageViewWindow");
    }
}
