package com.atguigu.edu.realtime.app.dws;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.edu.realtime.app.func.BeanToJsonStrFunction;
import com.atguigu.edu.realtime.bean.TrafficPageViewBean;
import com.atguigu.edu.realtime.util.DateFormatUtil;
import com.atguigu.edu.realtime.util.DorisUtil;
import com.atguigu.edu.realtime.util.MyKafkaUtil;
import org.apache.commons.lang3.StringUtils;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.api.common.state.StateTtlConfig;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple5;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.runtime.state.hashmap.HashMapStateBackend;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.datastream.WindowedStream;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.KeyedProcessFunction;
import org.apache.flink.streaming.api.functions.windowing.WindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;

import java.util.stream.Stream;

/**
 * ClassName: DwsTrafficVcChArIsNewScPageViewWindow
 * Package: com.atguigu.edu.realtime.app.dws
 * Description:
 * Traffic page-view summary windowed by version (vc), channel (ch), region (ar),
 * new/returning-visitor flag (is_new) and source (sc). Reads page logs from Kafka,
 * derives per-record metrics (uv/sv/pv/sc counts, view duration), aggregates them in
 * 10 s tumbling event-time windows and writes the result to Doris.
 * @Author Mr.2
 * @Create 2023/9/9 11:01
 * @Version 1.0
 */
public class DwsTrafficVcChArIsNewScPageViewWindow {
    public static void main(String[] args) {
        // TODO 1. Environment setup
        // 1.1 Streaming execution environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // 1.2 Parallelism
        env.setParallelism(4);

        // TODO 2. Checkpointing
        // 2.1 Take a checkpoint every 10 s (injects barriers into the stream)
        env.enableCheckpointing(10000L, CheckpointingMode.AT_LEAST_ONCE);
        /*
        // 2.2 Retain externalized checkpoints after the job is cancelled
        env.getCheckpointConfig().setExternalizedCheckpointCleanup(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);
        // 2.3 Checkpoint timeout: 60 s
        env.getCheckpointConfig().setCheckpointTimeout(60000L);
        // 2.4 Restart strategy
        // Args: 1) max failures  2) failure-rate measurement interval  3) delay between restart attempts
        env.setRestartStrategy(RestartStrategies.failureRateRestart(3, Time.days(30L),Time.seconds(3L)));
        // 2.5 Minimum pause between two checkpoints: 2 s
        env.getCheckpointConfig().setMinPauseBetweenCheckpoints(2000L);
        // 2.6 State backend and checkpoint storage location
        env.setStateBackend(new HashMapStateBackend());
        env.getCheckpointConfig().setCheckpointStorage("hdfs://hadoop102:8020/edu/ck");
        // 2.7 OS user used when writing checkpoints to HDFS
        System.setProperty("HADOOP_USER_NAME", "atguigu");

         */
        // TODO 3. Read the page-log topic from Kafka
        // 3.1 Topic and consumer group
        String topic = "dwd_traffic_page_log";
        String groupId = "dws_traffic_demo_group_001";
        // 3.2 Build the Kafka source
        KafkaSource<String> kafkaSource = MyKafkaUtil.getKafkaSource(topic, groupId);
        // 3.3 Wrap as a stream; watermarks are assigned later (step 7), so none here
        DataStreamSource<String> kafkaStrDS = env.fromSource(kafkaSource, WatermarkStrategy.noWatermarks(), "kafka-source");

        // TODO 4. Convert JSON string -> JSONObject for easier field access
        SingleOutputStreamOperator<JSONObject> jsonObjDS = kafkaStrDS.map(
                new MapFunction<String, JSONObject>() {
                    @Override
                    public JSONObject map(String jsonStr) throws Exception {
                        return JSON.parseObject(jsonStr);
                    }
                }
        );
        // For test output
//        jsonObjDS.print("JSONObject->");

        // TODO 5. Key by device id (mid) so per-device visit state can be kept
        KeyedStream<JSONObject, String> keyedByJSONObjDS = jsonObjDS.keyBy(
                new KeySelector<JSONObject, String>() {
                    @Override
                    public String getKey(JSONObject jsonObj) throws Exception {
                        return jsonObj.getJSONObject("common").getString("mid");
                    }
                }
        );

        // TODO 6. Map each page log to a TrafficPageViewBean carrying per-record
        //         metric contributions (word-count style: sum them up later)
        SingleOutputStreamOperator<TrafficPageViewBean> beanStream = keyedByJSONObjDS.process(
                new KeyedProcessFunction<String, JSONObject, TrafficPageViewBean>() {

                    // Last visit date (yyyy-MM-dd) of the current mid; used to decide
                    // whether this record contributes to the daily unique-visitor count.
                    private ValueState<String> lastVisitDateState;

                    @Override
                    public void open(Configuration parameters) throws Exception {
                        // State initialization
                        ValueStateDescriptor<String> valueStateDescriptor
                                = new ValueStateDescriptor<>("lastVisitDateState", String.class);
                        // UV is a daily metric, so the state only needs to live for one day.
                        valueStateDescriptor.enableTimeToLive(StateTtlConfig.newBuilder(Time.days(1L)).build());
                        lastVisitDateState = getRuntimeContext().getState(valueStateDescriptor);
                    }

                    @Override
                    public void processElement(JSONObject jsonObj, Context ctx, Collector<TrafficPageViewBean> out) throws Exception {
                        // Dimensions from "common": vc=version, ch=channel, ar=region,
                        // sc=source, is_new=new/returning visitor flag
                        JSONObject commonJsonObj = jsonObj.getJSONObject("common");
                        String vc = commonJsonObj.getString("vc");
                        String ch = commonJsonObj.getString("ch");
                        String ar = commonJsonObj.getString("ar");
                        String sc = commonJsonObj.getString("sc");
                        String isNew = commonJsonObj.getString("is_new");

                        // "page": "page_id", "last_page_id"
                        JSONObject pageJsonObj = jsonObj.getJSONObject("page");
                        String pageId = pageJsonObj.getString("page_id");
                        String lastPageId = pageJsonObj.getString("last_page_id");

                        // Event timestamp (ms) and its calendar date
                        Long ts = jsonObj.getLong("ts");
                        String curVisitDate = DateFormatUtil.toDate(ts);

                        // Metric 1: unique visitors (uvCt)
                        long uvCt = 0L;
                        // Date of the last visit of this mid, from state
                        String lastVisitDate = lastVisitDateState.value();
                        // First visit ever, or first visit on a new day -> counts as a UV
                        if (StringUtils.isEmpty(lastVisitDate) || !lastVisitDate.equals(curVisitDate)) {
                            uvCt = 1L;
                            lastVisitDateState.update(curVisitDate);
                        }
                        // Metric 2: sessions (svCt)
                        // A record with no last_page_id is the first page of a new session
                        long svCt = StringUtils.isEmpty(lastPageId) ? 1L : 0L;

                        // Metric 3: source count (scCt)
                        // NOTE(review): page_id is present on every page log, so this is
                        // always 1 and scCt mirrors pvCt; likely the intent was to count
                        // session starts (last_page_id empty) with a non-empty sc — confirm.
                        long scCt = StringUtils.isNotEmpty(pageId) ? 1L : 0L;
                        // Metric 4: page views (pvCt) — each record is one page view
                        long pvCt = 1L;
                        // Metric 5: accumulated view duration; during_time may be absent,
                        // so default to 0 (a null Long here would NPE in the window reduce)
                        long durSum = pageJsonObj.getLongValue("during_time");

                        TrafficPageViewBean pageViewBean = new TrafficPageViewBean(
                                "",
                                "",
                                vc,
                                ch,
                                ar,
                                isNew,
                                sc,
                                "",
                                uvCt,
                                svCt,
                                pvCt,
                                scCt,
                                durSum,
                                ts
                        );

                        // Emit downstream
                        out.collect(pageViewBean);
                    }
                }
        );
        // For test output -> yes
//        beanStream.print("viewBean->");

        // TODO 7. Assign event time / watermarks (deliberately not done at the Kafka source)
        // NOTE(review): forMonotonousTimestamps assumes timestamps never decrease within a
        // subtask; with several Kafka partitions interleaved this can silently drop late
        // records — consider forBoundedOutOfOrderness if that matters here.
        SingleOutputStreamOperator<TrafficPageViewBean> withWatermarkStream = beanStream.assignTimestampsAndWatermarks(
                WatermarkStrategy.<TrafficPageViewBean>forMonotonousTimestamps()
                        .withTimestampAssigner(
                                new SerializableTimestampAssigner<TrafficPageViewBean>() {
                                    @Override
                                    public long extractTimestamp(TrafficPageViewBean bean, long recordTimestamp) {
                                        return bean.getTs();
                                    }
                                }
                        )
        );

        // TODO 8. Key by the statistics dimensions (vc, ch, ar, is_new, sc)
        KeyedStream<TrafficPageViewBean, Tuple5<String, String, String, String, String>> keyedByBeanStream = withWatermarkStream.keyBy(
                new KeySelector<TrafficPageViewBean, Tuple5<String, String, String, String, String>>() {
                    @Override
                    public Tuple5<String, String, String, String, String> getKey(TrafficPageViewBean pageViewBean) throws Exception {
                        // Repackage the five dimensions as the grouping key
                        String vc = pageViewBean.getVc();
                        String ch = pageViewBean.getCh();
                        String ar = pageViewBean.getAr();
                        String isNew = pageViewBean.getIsNew();
                        String sc = pageViewBean.getSc();
                        return Tuple5.of(vc, ch, ar, sc, isNew);
                    }
                }
        );

        // TODO 9. 10 s tumbling event-time windows
        WindowedStream<TrafficPageViewBean, Tuple5<String, String, String, String, String>, TimeWindow> windowedStream
                = keyedByBeanStream.window(TumblingEventTimeWindows.of(org.apache.flink.streaming.api.windowing.time.Time.seconds(10L)));

        // TODO 10. Aggregate within each window
        SingleOutputStreamOperator<TrafficPageViewBean> reducedStream = windowedStream.reduce(
                new ReduceFunction<TrafficPageViewBean>() {
                    @Override
                    public TrafficPageViewBean reduce(TrafficPageViewBean value1, TrafficPageViewBean value2) throws Exception {
                        // Incremental pairwise aggregation of all counters
                        value1.setPvCt(value1.getPvCt() + value2.getPvCt());
                        value1.setUvCt(value1.getUvCt() + value2.getUvCt());
                        value1.setSvCt(value1.getSvCt() + value2.getSvCt());
                        value1.setScCt(value1.getScCt() + value2.getScCt());
                        value1.setDurSum(value1.getDurSum() + value2.getDurSum());
                        return value1;
                    }
                },
                // Enrich the aggregated bean with window time attributes
                new WindowFunction<TrafficPageViewBean, TrafficPageViewBean, Tuple5<String, String, String, String, String>, TimeWindow>() {
                    @Override
                    public void apply(Tuple5<String, String, String, String, String> stringStringStringStringStringTuple5, TimeWindow window, Iterable<TrafficPageViewBean> input, Collector<TrafficPageViewBean> out) throws Exception {
                        // Window start/end and the Doris dynamic-partition date field
                        String stt = DateFormatUtil.toYmdHms(window.getStart());
                        String edt = DateFormatUtil.toYmdHms(window.getEnd());
                        String curDate = DateFormatUtil.toDate(window.getStart());

                        for (TrafficPageViewBean viewBean : input) {
                            viewBean.setStt(stt);
                            viewBean.setEdt(edt);
                            viewBean.setCurDate(curDate);

                            // Time attributes filled in; emit downstream
                            out.collect(viewBean);
                        }
                    }
                }
        );
        // Debug sink — comment out for production, like the prints above
        reducedStream.print("reduce->");

        // TODO 11. Write the aggregated result to Doris
        // Type conversion: bean -> JSON string
        reducedStream
                .map(new BeanToJsonStrFunction<TrafficPageViewBean>())
                .sinkTo(DorisUtil.getDorisSink("dws_traffic_vc_ch_ar_sc_is_new_page_view_window"));

        try {
            env.execute();
        } catch (Exception e) {
            throw new RuntimeException(e);
        }

    }
}
