package com.atguigu.edu.realtime220815.app.dws;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.edu.realtime220815.bean.TrafficPageViewBean;
import com.atguigu.edu.realtime220815.util.ClickHouseUtils;
import com.atguigu.edu.realtime220815.util.DateFormatUtil;
import com.atguigu.edu.realtime220815.util.KafkaUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.api.common.state.StateTtlConfig;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple4;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.runtime.state.hashmap.HashMapStateBackend;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.datastream.WindowedStream;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.KeyedProcessFunction;
import org.apache.flink.streaming.api.functions.windowing.ProcessWindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;

import java.time.Duration;

/**
 * @Classname DwsTrafficVcChArIsnewPageViewWindow
 * @Description DWS-layer job: aggregates page-view traffic metrics (UV, SV, PV,
 *              total view duration) per 10-second tumbling event-time window,
 *              grouped by app version, channel, area and new-visitor flag,
 *              then writes the result to ClickHouse.
 * @Date 2023/2/16 16:50
 * @Created by lzx
 */
public class DwsTrafficVcChArIsnewPageViewWindow {
    public static void main(String[] args) throws Exception {
        /*
        Pipeline overview:
        1. Create the execution environment
        2. Checkpoint settings
        3. Read data from the Kafka DWD page-view topic
        4. Convert each record: String -> JSONObject
        5. keyBy the mid in "common" (the device id), to detect unique visitors
        6. Map each keyed record into a TrafficPageViewBean
        7. Assign watermarks, extracting the event-time field
        8. keyBy the four dimensions
        9. Open windows
        10. Aggregate within each window
        11. Write the aggregated result to ClickHouse
         */
        // 1. Execution environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(4);

        // 2. Checkpoint settings (disabled here, e.g. for local testing)
        /*env.enableCheckpointing(5000, CheckpointingMode.EXACTLY_ONCE);
        env.setStateBackend(new HashMapStateBackend());
        env.setRestartStrategy(RestartStrategies.failureRateRestart(3, Time.days(30), Time.seconds(20)));
        env.getCheckpointConfig().setMinPauseBetweenCheckpoints(2000);
        env.getCheckpointConfig().enableExternalizedCheckpoints(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);
        env.getCheckpointConfig().setCheckpointTimeout(60000);
        env.getCheckpointConfig().setCheckpointStorage("hdfs://hadoop101:8020/eduRealTime/ck");
        System.setProperty("HADOOP_USER_NAME", "atguigu");*/

        // 3. Read the DWD page-view log topic from Kafka
        DataStreamSource<String> pageDS = env.addSource(
                KafkaUtils.getFlinkKafkaConsumer("dwd_traffic_page_log", "dws_traffic_VcChArIsNew_page_group"));

        // 4. Convert each record: String -> JSONObject
        SingleOutputStreamOperator<JSONObject> jsonObjDS = pageDS.map(JSON::parseObject);

        // 5. keyBy mid (device id) from "common"; per-key state lets us count unique visitors
        KeyedStream<JSONObject, String> keyedStream =
                jsonObjDS.keyBy(value -> value.getJSONObject("common").getString("mid"));

        /*
        6. Map each keyed record into a TrafficPageViewBean. No window has been opened yet,
        so the window start/end fields of the bean are filled with empty strings for now and
        set properly in step 10. The bean carries a unique-visitor count, which requires
        keyed state (last visit date per mid).
         */
        SingleOutputStreamOperator<TrafficPageViewBean> pageViewBeanDS = keyedStream.process(new KeyedProcessFunction<String, JSONObject, TrafficPageViewBean>() {
            // Keyed state holding the last visit date (yyyy-MM-dd) of the current mid.
            private ValueState<String> lastDate = null;

            @Override
            public void open(Configuration parameters) throws Exception {
                // Initialize the state handle; a 1-day TTL is enough because UV is a daily metric.
                ValueStateDescriptor<String> valueStateDescriptor = new ValueStateDescriptor<>("lastDate", String.class);
                valueStateDescriptor.enableTimeToLive(StateTtlConfig.newBuilder(Time.days(1)).build());
                lastDate = getRuntimeContext().getState(valueStateDescriptor);
            }

            @Override
            public void processElement(JSONObject value, KeyedProcessFunction<String, JSONObject, TrafficPageViewBean>.Context ctx, Collector<TrafficPageViewBean> out) throws Exception {
                /*
                Converts one page-view log (JSONObject) into a TrafficPageViewBean,
                extracting the four dimensions and computing the per-record measures.
                 */
                JSONObject common = value.getJSONObject("common");
                String vc = common.getString("vc");         // app version
                String ch = common.getString("ch");         // channel
                String ar = common.getString("ar");         // area
                String isNew = common.getString("is_new");  // new-visitor flag
                Long ts = value.getLong("ts");              // event timestamp (ms)

                // Unique visitor: first record of this mid today. Compute today's date once
                // instead of twice (it is needed for both the comparison and the update).
                String curDate = DateFormatUtil.toDate(ts);
                String lastVisitDate = lastDate.value();
                Long uv = 0L;
                if (StringUtils.isEmpty(lastVisitDate) || !lastVisitDate.equals(curDate)) {
                    uv = 1L;                 // independent visitor confirmed
                    lastDate.update(curDate); // remember today's date for this mid
                }

                // Session start: a page with no last_page_id is the first page of a session,
                // so sv (session view) is 1, otherwise 0.
                JSONObject page = value.getJSONObject("page");
                Long sv = page.getString("last_page_id") == null ? 1L : 0L;

                Long duringTime = page.getLong("during_time");
                out.collect(new TrafficPageViewBean(
                        "",         // window start — filled after windowing (step 10)
                        "",         // window end   — filled after windowing (step 10)
                        vc,
                        ch,
                        ar,
                        isNew,
                        uv,
                        sv,
                        1L,         // pv: every page log contributes one page view
                        duringTime,
                        ts
                ));
            }
        });
        //pageViewBeanDS.print(); // debug

        // Sample output of step 6: TrafficPageViewBean(stt=, edt=, vc=v2.1.132, ch=Appstore, ar=440000, isNew=0, uvCt=1, svCt=1, pvCt=1, durSum=13965, ts=1692116137000)
        // 7. Assign watermarks, extracting the event-time field; out-of-orderness bound is 2s
        SingleOutputStreamOperator<TrafficPageViewBean> wmDs = pageViewBeanDS
                .assignTimestampsAndWatermarks(WatermarkStrategy.<TrafficPageViewBean>forBoundedOutOfOrderness(Duration.ofSeconds(2))
                        .withTimestampAssigner((e, r) -> e.getTs()));

        // 8. keyBy the four dimensions, packed into a Tuple4.
        // NOTE: an anonymous KeySelector (not a lambda) is used on purpose — Flink cannot
        // extract the generic Tuple4 type information from a lambda without .returns(...).
        KeyedStream<TrafficPageViewBean, Tuple4<String, String, String, String>> dimKeyedByDS = wmDs.keyBy(new KeySelector<TrafficPageViewBean, Tuple4<String, String, String, String>>() {
            @Override
            public Tuple4<String, String, String, String> getKey(TrafficPageViewBean value) throws Exception {
                return Tuple4.of(value.getVc(), value.getCh(), value.getAr(), value.getIsNew());
            }
        });

        // 9. Tumbling event-time window, 10 seconds wide
        WindowedStream<TrafficPageViewBean, Tuple4<String, String, String, String>, TimeWindow> window = dimKeyedByDS.window(TumblingEventTimeWindows.of(org.apache.flink.streaming.api.windowing.time.Time.seconds(10)));

        // 10. Aggregate per dimension: incremental reduce first, then a window function
        //     to attach the window start/end times to the single reduced element.
        SingleOutputStreamOperator<TrafficPageViewBean> reduce = window.reduce(new ReduceFunction<TrafficPageViewBean>() {
            @Override
            public TrafficPageViewBean reduce(TrafficPageViewBean value1, TrafficPageViewBean value2) throws Exception {
                // Accumulate the four measures of records sharing the same dimensions.
                value1.setUvCt(value1.getUvCt() + value2.getUvCt());
                value1.setSvCt(value1.getSvCt() + value2.getSvCt());
                value1.setPvCt(value1.getPvCt() + value2.getPvCt());
                value1.setDurSum(value1.getDurSum() + value2.getDurSum());
                return value1;
            }
        }, new ProcessWindowFunction<TrafficPageViewBean, TrafficPageViewBean, Tuple4<String, String, String, String>, TimeWindow>() {
            @Override
            public void process(Tuple4<String, String, String, String> key, ProcessWindowFunction<TrafficPageViewBean, TrafficPageViewBean, Tuple4<String, String, String, String>, TimeWindow>.Context context, Iterable<TrafficPageViewBean> elements, Collector<TrafficPageViewBean> out) throws Exception {
                // The incremental reduce above leaves exactly one element per window;
                // here we only fill in the window start/end that were blank in step 6.
                TrafficPageViewBean next = elements.iterator().next();
                long start = context.window().getStart();
                long end = context.window().getEnd();
                next.setStt(DateFormatUtil.toYmdHms(start));
                next.setEdt(DateFormatUtil.toYmdHms(end));
                // ts now represents the aggregation time, so use the current system time.
                next.setTs(System.currentTimeMillis());
                out.collect(next);
            }
        });
        //reduce.print(">>>"); // debug

        // 11. Sink the aggregated beans to ClickHouse
        reduce.addSink(ClickHouseUtils
                .getClickHouseSinkFunction("insert into dws_traffic_vc_ch_ar_is_new_page_view_window values(?,?,?,?,?,?,?,?,?,?,?)"));

        env.execute();
    }
}
