package com.atguigu.app.dws;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.bean.TrafficHomeDetailPageViewBean;
import com.atguigu.utils.DateFormatUtil;
import com.atguigu.utils.KafkaUtil;
import com.atguigu.utils.MyClickHouseUtil;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.common.state.StateTtlConfig;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.AllWindowedStream;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.KeyedProcessFunction;
import org.apache.flink.streaming.api.functions.windowing.AllWindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;

import java.time.Duration;


/*
10.3 Traffic domain: per-window page-view summary table (home / product detail)
10.3.1 Main task
Read page-log records from the Kafka DWD topic and compute the daily unique-visitor
counts for the home page and the product detail page.
(Multiple visits to the home page by one device (mid) on the same day count once;
the home-page UV is the number of distinct mids that visited the home page that day.)

 */
//todo 1. Create the execution environment
//todo 2. Read the DWD page log from Kafka and convert each record to a JSONObject (so the event time can be extracted later)
//todo 3. Keep only home-page and product-detail-page records
//todo 4. Assign event time and watermarks
//todo 5. Key the stream by mid
//todo 6. Two value states: per mid, flag the first home-page record of the day with 1 (daily home-page unique visitor) and later ones with 0 (state programming); same for the product detail page; convert to a JavaBean
//todo 7. Window and aggregate
//todo 8. Write the result to ClickHouse
public class Dws03TrafficHomeDetailPageViewBean {
    public static void main(String[] args) throws Exception {
        //TODO 1. Create the execution environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        //TODO Checkpointing must be enabled in production; it is commented out for local
        //     testing so HDFS does not have to be started for every test run.
//        The job then has to be (re)started from a checkpoint or savepoint.
//        //2.1 Enable checkpointing every 3 s with exactly-once semantics
//        env.enableCheckpointing(3000L, CheckpointingMode.EXACTLY_ONCE);//exactly once: aligned barriers by default
//        //2.2 Checkpoint timeout of 1 min: a checkpoint taking longer is considered failed
//        //    and discarded; the next checkpoint simply proceeds
//        env.getCheckpointConfig().setCheckpointTimeout(60*1000L);
//        //2.3 Minimum pause of 3 s between two checkpoints
//        env.getCheckpointConfig().setMinPauseBetweenCheckpoints(3000L);
//        //2.4 Retain the last checkpoint when the job is cancelled
//        env.getCheckpointConfig().enableExternalizedCheckpoints(
//                CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION
//        );
//        //2.5 Restart strategy used when recovering from a checkpoint
//        env.setRestartStrategy(RestartStrategies.failureRateRestart(
//                3, Time.days(1L),Time.minutes(1L)
//        ));
//        //2.6 State backend
//        env.setStateBackend(new HashMapStateBackend());//where local state lives
//        env.getCheckpointConfig().setCheckpointStorage(
//                "hdfs://hadoop102:8020/flinkCDC/220828"
//        );//where checkpoints are stored
//        //2.7 User name for accessing HDFS
//        System.setProperty("HADOOP_USER_NAME","atguigu");

        //TODO 2+3. Read the DWD page-log topic from Kafka, parse each record into a
        //     JSONObject and keep only "home" and "good_detail" pages.
        //     Parsing and filtering are merged into a single flatMap (as the original
        //     todo suggested) so that malformed records or records without a "page"
        //     object are dropped instead of crashing the job.
        SingleOutputStreamOperator<JSONObject> filteredStream = env
                .addSource(KafkaUtil.getFlinkKafkaConsumer("dwd_traffic_page_log", "home_detail_pageview_220828"))
                .flatMap(new FlatMapFunction<String, JSONObject>() {
                    @Override
                    public void flatMap(String value, Collector<JSONObject> out) throws Exception {
                        try {
                            JSONObject jsonObject = JSON.parseObject(value);
                            String pageId = jsonObject.getJSONObject("page").getString("page_id");
                            if ("home".equals(pageId) || "good_detail".equals(pageId)) {
                                out.collect(jsonObject);
                            }
                        } catch (Exception ignored) {
                            // Dirty data: not valid JSON or missing the "page" object — skip it.
                        }
                    }
                });

        //TODO 4. Assign event time and watermarks (2 s bounded out-of-orderness)
        SingleOutputStreamOperator<JSONObject> withWatermarkStream = filteredStream.assignTimestampsAndWatermarks(
                WatermarkStrategy
                        .<JSONObject>forBoundedOutOfOrderness(Duration.ofSeconds(2))
                        .withTimestampAssigner(new SerializableTimestampAssigner<JSONObject>() {
                            @Override
                            public long extractTimestamp(JSONObject element, long recordTimestamp) {
                                // Event time is carried in the "ts" field of the log record.
                                return element.getLong("ts");
                            }
                        })
        );

        //TODO 5. Key by device id (mid)
        KeyedStream<JSONObject, String> keyByMidStream =
                withWatermarkStream.keyBy(s -> s.getJSONObject("common").getString("mid"));

        //TODO 6. State programming: per mid, flag the first home-page record of the day
        //     with 1 (daily unique visitor) and later ones with 0; same for the product
        //     detail page. All-zero records are filtered out, which is why the
        //     lower-level process() is used here instead of a RichMapFunction
        //     (a Rich(Flat)MapFunction would work as well).
        SingleOutputStreamOperator<TrafficHomeDetailPageViewBean> javabeanDS = keyByMidStream.process(new KeyedProcessFunction<String, JSONObject, TrafficHomeDetailPageViewBean>() {

            // Two per-key states holding the date (yyyy-MM-dd) of this mid's last
            // home-page / product-detail-page visit, used to decide whether the
            // current record is the first visit of the day (i.e. a unique visitor).
            private ValueState<String> homeLastVisitDtState;
            private ValueState<String> goodDetailLastVisitDtState;

            @Override
            public void open(Configuration parameters) throws Exception {
                // Expire state one day after the last write so stale dates do not
                // accumulate forever.
                StateTtlConfig ttlConfig = StateTtlConfig.newBuilder(Time.days(1))
                        .setUpdateType(StateTtlConfig.UpdateType.OnCreateAndWrite)
                        .build();

                ValueStateDescriptor<String> homeStateDescriptor = new ValueStateDescriptor<>("home_last_visit_dt", String.class);
                homeStateDescriptor.enableTimeToLive(ttlConfig);
                homeLastVisitDtState = getRuntimeContext().getState(homeStateDescriptor);

                ValueStateDescriptor<String> detailStateDescriptor = new ValueStateDescriptor<>("detail_last_visit_dt", String.class);
                detailStateDescriptor.enableTimeToLive(ttlConfig);
                goodDetailLastVisitDtState = getRuntimeContext().getState(detailStateDescriptor);
            }

            @Override
            public void processElement(JSONObject value, Context ctx, Collector<TrafficHomeDetailPageViewBean> out) throws Exception {
                // Derive the current record's date from its event time.
                Long ts = value.getLong("ts");
                String curDt = DateFormatUtil.toDate(ts);

                // Both counters default to 0; only the mid's first visit of the day
                // for the matching page sets the counter to 1.
                long homeCt = 0L;
                long goodDetailCt = 0L;

                // Each record is either a home page or a product detail page; read and
                // update only the matching state.
                String pageId = value.getJSONObject("page").getString("page_id");

                if ("home".equals(pageId)) {
                    String homeLastDt = homeLastVisitDtState.value();
                    if (homeLastDt == null || curDt.compareTo(homeLastDt) > 0) {
                        // First home-page visit of this mid today
                        homeCt = 1L;
                        homeLastVisitDtState.update(curDt);
                    }
                } else if ("good_detail".equals(pageId)) {
                    String goodDetailLastDt = goodDetailLastVisitDtState.value();
                    if (goodDetailLastDt == null || curDt.compareTo(goodDetailLastDt) > 0) {
                        // First product-detail-page visit of this mid today
                        goodDetailCt = 1L;
                        goodDetailLastVisitDtState.update(curDt);
                    }
                }

                if (homeCt == 1L || goodDetailCt == 1L) {
                    // Drop all-zero records to reduce downstream traffic. Window
                    // start/end stay empty and ts stays null here: the watermark was
                    // already extracted upstream, so the window function stamps the
                    // bounds and assigns the processing time as the version number.
                    out.collect(new TrafficHomeDetailPageViewBean(
                                    "",
                                    "",
                                    homeCt,
                                    goodDetailCt,
                                    null
                            )
                    );
                }
            }
        });

//        javabeanDS.print("javabeanDS>>>>>>>");

        //TODO 7. 10-second tumbling event-time window over the whole stream,
        //     incrementally summing both UV counters.
        AllWindowedStream<TrafficHomeDetailPageViewBean, TimeWindow> windowAllStream =
                javabeanDS.windowAll(TumblingEventTimeWindows.of(org.apache.flink.streaming.api.windowing.time.Time.seconds(10)));

        SingleOutputStreamOperator<TrafficHomeDetailPageViewBean> reducedStream = windowAllStream.reduce(new ReduceFunction<TrafficHomeDetailPageViewBean>() {
            @Override
            public TrafficHomeDetailPageViewBean reduce(TrafficHomeDetailPageViewBean value1, TrafficHomeDetailPageViewBean value2) throws Exception {
                value1.setHomeUvCt(value1.getHomeUvCt() + value2.getHomeUvCt());
                value1.setGoodDetailUvCt(value1.getGoodDetailUvCt() + value2.getGoodDetailUvCt());
                return value1;
            }
        }, new AllWindowFunction<TrafficHomeDetailPageViewBean, TrafficHomeDetailPageViewBean, TimeWindow>() {
            @Override
            public void apply(TimeWindow window, Iterable<TrafficHomeDetailPageViewBean> values, Collector<TrafficHomeDetailPageViewBean> out) throws Exception {
                // After the incremental reduce the iterable holds exactly one
                // pre-aggregated bean; stamp it with the window bounds and the
                // processing-time version number.
                for (TrafficHomeDetailPageViewBean value : values) {
                    value.setStt(DateFormatUtil.toYmdHms(window.getStart()));
                    value.setEdt(DateFormatUtil.toYmdHms(window.getEnd()));
                    value.setTs(System.currentTimeMillis());
                    out.collect(value);
                }
            }
        });
        reducedStream.print("即将写到clickhouse的数据");

        //TODO 8. Sink to ClickHouse
        reducedStream.addSink(MyClickHouseUtil.getSinkFunction("insert into dws_traffic_page_view_window values(?,?,?,?,?)"));

        //TODO 9. Submit the job
        env.execute();

    }
}
