package com.nepu.gmall.realtime.app.dws;

import com.alibaba.fastjson.JSONObject;
import com.nepu.gmall.realtime.bean.TrafficHomeDetailPageViewBean;
import com.nepu.gmall.realtime.util.ClickHouseUtil;
import com.nepu.gmall.realtime.util.DateFormatUtil;
import com.nepu.gmall.realtime.util.KafkaUtils;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.common.functions.RichFlatMapFunction;
import org.apache.flink.api.common.state.StateTtlConfig;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.windowing.AllWindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.assigners.TumblingProcessingTimeWindows;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;

import java.time.Duration;

/**
 * 流量域页面浏览各窗口汇总表
 * 从 Kafka 页面日志主题读取数据，统计当日的首页和商品详情页独立访客数。
 * 业务处理的逻辑分析
 * （1）从kafka的dwd_traffic_page_log主题中读取数据
 * （2）过滤出page_id == "home" || page_id == "good_detail"的数据，并且转换数据结构为json对象
 * （3）提取事件时间，并且设置watermark
 *  (4) 对数据按照mid进行分组，主要的作用是对数据进行过滤
 *  (5) 存储状态进行首日的判断
 *  （6）开窗 聚合
 *  （7）数据写入到clickHouse
 *  (8) 执行流
 *
 *  数据流向：
 *  mock --> 日志服务器 --> f1.sh --> kafka --> BaseLogApp.class --> kafka --> DwsTrafficPageViewWindow.class --> clickHouse
 * @author chenshuaijun
 * @create 2023-03-01 15:50
 */
public class DwsTrafficPageViewWindow {

    public static void main(String[] args) throws Exception {

        // TODO 1. Create the streaming execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // In production the parallelism must never be 1; it should match the
        // partition count of the Kafka topic consumed below. 1 is for local testing.
        env.setParallelism(1);
        // Checkpoint settings (kept disabled for local runs): 5-minute interval with
        // exactly-once semantics, 10-minute timeout, externalized checkpoints retained
        // on cancellation, failure-rate restart strategy, minimum 3s between checkpoints,
        // HashMap state backend with an HDFS checkpoint directory, HDFS user "atguigu".
        /*env.enableCheckpointing(5 * 60000L, CheckpointingMode.EXACTLY_ONCE);
        env.getCheckpointConfig().setCheckpointTimeout(10 * 60000L);
        env.getCheckpointConfig().enableExternalizedCheckpoints(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);
        env.setRestartStrategy(RestartStrategies.failureRateRestart(10, Time.of(1L, TimeUnit.DAYS), Time.of(3L, TimeUnit.MINUTES)));
        env.getCheckpointConfig().setMinPauseBetweenCheckpoints(3000L);
        env.setStateBackend(new HashMapStateBackend());
        env.getCheckpointConfig().setCheckpointStorage("hdfs://hadoop102:8020/checkpoint");
        System.setProperty("HADOOP_USER_NAME", "atguigu");*/

        // TODO 2. Consume the page-log topic from Kafka.
        String topic = "dwd_traffic_page_log";
        String groupId = "DwsTrafficPageViewWindow";
        DataStreamSource<String> pageDataStream = env.addSource(KafkaUtils.getKafkaConsumer(topic, groupId));

        // TODO 3. Parse each record to JSON and keep only home / good_detail pages.
        SingleOutputStreamOperator<JSONObject> transformAndFilterDataStream = pageDataStream.flatMap(new FlatMapFunction<String, JSONObject>() {
            @Override
            public void flatMap(String value, Collector<JSONObject> out) throws Exception {
                JSONObject jsonObject = JSONObject.parseObject(value);
                // Guard against records without a "page" section instead of
                // failing the job with a NullPointerException.
                JSONObject page = jsonObject.getJSONObject("page");
                if (page == null) {
                    return;
                }
                String pageId = page.getString("page_id");
                if ("home".equals(pageId) || "good_detail".equals(pageId)) {
                    out.collect(jsonObject);
                }
            }
        });

        // TODO 4. Extract the event time ("ts", epoch millis) and generate
        //         watermarks tolerating 2 seconds of out-of-orderness.
        SingleOutputStreamOperator<JSONObject> waterMarkStream = transformAndFilterDataStream.assignTimestampsAndWatermarks(
                WatermarkStrategy.<JSONObject>forBoundedOutOfOrderness(Duration.ofSeconds(2))
                        .withTimestampAssigner(new SerializableTimestampAssigner<JSONObject>() {
                            @Override
                            public long extractTimestamp(JSONObject element, long recordTimestamp) {
                                return element.getLong("ts");
                            }
                        }));

        // TODO 5. Key by device id (mid) so per-device visit state can be kept.
        KeyedStream<JSONObject, String> keyedStream = waterMarkStream.keyBy(new KeySelector<JSONObject, String>() {
            @Override
            public String getKey(JSONObject value) throws Exception {
                return value.getJSONObject("common").getString("mid");
            }
        });

        // TODO 6. Use keyed state to deduplicate: a device counts at most once per
        //         day for the home page and once per day for the good-detail page.
        SingleOutputStreamOperator<TrafficHomeDetailPageViewBean> flatMapDataStream = keyedStream.flatMap(new RichFlatMapFunction<JSONObject, TrafficHomeDetailPageViewBean>() {

            // Date (as produced by DateFormatUtil.toDate) of the last recorded
            // visit for each page type, per device.
            private ValueState<String> lastHomeVisitDate;
            private ValueState<String> lastDetailVisitDate;

            @Override
            public void open(Configuration parameters) throws Exception {
                // The metric is daily, so the state may expire after one day;
                // the TTL timer is refreshed on every state write.
                ValueStateDescriptor<String> homeStateDescriptor = new ValueStateDescriptor<>("home-descriptor", String.class);
                ValueStateDescriptor<String> detailStateDescriptor = new ValueStateDescriptor<>("detail-descriptor", String.class);
                StateTtlConfig ttlConfig = StateTtlConfig.newBuilder(Time.days(1))
                        .setUpdateType(StateTtlConfig.UpdateType.OnCreateAndWrite)
                        .build();
                homeStateDescriptor.enableTimeToLive(ttlConfig);
                detailStateDescriptor.enableTimeToLive(ttlConfig);

                lastHomeVisitDate = getRuntimeContext().getState(homeStateDescriptor);
                lastDetailVisitDate = getRuntimeContext().getState(detailStateDescriptor);
            }

            @Override
            public void flatMap(JSONObject value, Collector<TrafficHomeDetailPageViewBean> out) throws Exception {
                long curTS = value.getLong("ts");
                String currentDate = DateFormatUtil.toDate(curTS);
                String lastHomeDate = lastHomeVisitDate.value();
                String lastDetailDate = lastDetailVisitDate.value();

                long homeNum = 0L;
                long detailNum = 0L;
                // Upstream filtering guarantees page_id is "home" or "good_detail".
                if ("home".equals(value.getJSONObject("page").getString("page_id"))) {
                    if (lastHomeDate == null || !lastHomeDate.equals(currentDate)) {
                        homeNum = 1L;
                        lastHomeVisitDate.update(currentDate);
                    }
                } else {
                    if (lastDetailDate == null || !lastDetailDate.equals(currentDate)) {
                        detailNum = 1L;
                        lastDetailVisitDate.update(currentDate);
                    }
                }

                // Emit only when this record represents a new daily unique visitor
                // for at least one of the two page types.
                if (homeNum != 0 || detailNum != 0) {
                    out.collect(new TrafficHomeDetailPageViewBean("", "", homeNum, detailNum, curTS));
                }
            }
        });

        // TODO 7. Aggregate in 10-second tumbling EVENT-time windows.
        // Bug fix: the original used TumblingProcessingTimeWindows, which ignores the
        // watermarks assigned in step 4; event-time windows make the 2-second
        // out-of-orderness tolerance actually take effect.
        SingleOutputStreamOperator<TrafficHomeDetailPageViewBean> reduceDataStream = flatMapDataStream
                .windowAll(TumblingEventTimeWindows.of(org.apache.flink.streaming.api.windowing.time.Time.seconds(10)))
                .reduce(new ReduceFunction<TrafficHomeDetailPageViewBean>() {
                    @Override
                    public TrafficHomeDetailPageViewBean reduce(TrafficHomeDetailPageViewBean value1, TrafficHomeDetailPageViewBean value2) throws Exception {
                        // Sum the per-record UV increments into a single window total.
                        value1.setHomeUvCt(value1.getHomeUvCt() + value2.getHomeUvCt());
                        value1.setGoodDetailUvCt(value1.getGoodDetailUvCt() + value2.getGoodDetailUvCt());
                        return value1;
                    }
                }, new AllWindowFunction<TrafficHomeDetailPageViewBean, TrafficHomeDetailPageViewBean, TimeWindow>() {
                    @Override
                    public void apply(TimeWindow window, Iterable<TrafficHomeDetailPageViewBean> values, Collector<TrafficHomeDetailPageViewBean> out) throws Exception {
                        // The reduce phase leaves exactly one pre-aggregated element.
                        TrafficHomeDetailPageViewBean homeDetailPageViewBean = values.iterator().next();

                        // Stamp the window boundaries and the emission time.
                        homeDetailPageViewBean.setStt(DateFormatUtil.toYmdHms(window.getStart()));
                        homeDetailPageViewBean.setEdt(DateFormatUtil.toYmdHms(window.getEnd()));
                        homeDetailPageViewBean.setTs(System.currentTimeMillis());

                        out.collect(homeDetailPageViewBean);
                    }
                });

        reduceDataStream.print(">>>>>");

        // TODO 8. Write the aggregated rows to ClickHouse.
        reduceDataStream.addSink(ClickHouseUtil.getJdbcSink("insert into dws_traffic_page_view_window values(?,?,?,?,?)"));

        env.execute("DwsTrafficPageViewWindow");

    }
}
