package com.nepu.gmall.realtime.app.dws;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.nepu.gmall.realtime.bean.UserLoginBean;
import com.nepu.gmall.realtime.util.ClickHouseUtil;
import com.nepu.gmall.realtime.util.DateFormatUtil;
import com.nepu.gmall.realtime.util.KafkaUtils;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.common.functions.RichFlatMapFunction;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.windowing.AllWindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingProcessingTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;

import java.time.Duration;

/**
 * 用户域用户登陆各窗口汇总表
 * 从 Kafka 页面日志主题读取数据，统计七日回流用户和当日独立用户数。
 * (1) 首先读取dwd_traffic_page_log主题的数据
 * (2) 转换数据结构同时对数据进行过滤，因为这里使用的是uid，我们需要过滤出带有uid的数据，过滤的条件是
 *          uid != null && last_page_id == null     --> 过滤出一打开app就直接登录的数据
 *          uid != null && last_page_id == "login"  --> 过滤出浏览若干页面之后才登录的数据
 * (3) 提取事件时间生成watermark
 * (4) 对数据按照uid进行分组
 * (5) 根据状态计算回流和当日
 * (6) 对数据开窗聚合
 * (7) 将结果写入到clickHouse
 * (8) 执行
 *
 * 数据的流向：
 * mock --> 日志服务器 --> f1.sh --> kafka --> BaseLogApp.class --> kafka --> DwsUserUserLoginWindow.class --> clickHouse
 * @author chenshuaijun
 * @create 2023-03-01 16:54
 */
public class DwsUserUserLoginWindow {

    /**
     * Minimum gap (ms) between the previous login date and the current login date for the user
     * to count as a "back" (回流) user: an 8-day date difference means there were at least
     * 7 full calendar days without a login in between.
     */
    private static final long BACK_USER_GAP_MS = 8L * 24 * 60 * 60 * 1000;

    public static void main(String[] args) throws Exception {

        // TODO 1. Create the streaming execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // In production the parallelism must match the partition count of the Kafka topic
        // consumed below; 1 is only suitable for local testing.
        env.setParallelism(1);
        /* Checkpointing is disabled for local runs. In production enable it roughly like this:
        // 5-minute checkpoint interval, exactly-once semantics.
        env.enableCheckpointing(5 * 60000L, CheckpointingMode.EXACTLY_ONCE);
        // 10-minute checkpoint timeout.
        env.getCheckpointConfig().setCheckpointTimeout(10 * 60000L);
        // Retain externalized checkpoints so a failed job can be restored from them.
        env.getCheckpointConfig().enableExternalizedCheckpoints(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);
        // Failure-rate restart strategy.
        env.setRestartStrategy(RestartStrategies.failureRateRestart(10, Time.of(1L, TimeUnit.DAYS), Time.of(3L, TimeUnit.MINUTES)));
        // Minimum pause between two checkpoints.
        env.getCheckpointConfig().setMinPauseBetweenCheckpoints(3000L);
        // In-memory state backend with checkpoints stored on HDFS.
        env.setStateBackend(new HashMapStateBackend());
        env.getCheckpointConfig().setCheckpointStorage("hdfs://hadoop102:8020/checkpoint");
        // Only the atguigu user may write to this HDFS path.
        System.setProperty("HADOOP_USER_NAME", "atguigu");*/

        // TODO 2. Consume the page log topic from Kafka.
        String topic = "dwd_traffic_page_log";
        DataStreamSource<String> sourcePageDataStream = env.addSource(KafkaUtils.getKafkaConsumer(topic, "DwsUserUserLoginWindow"));

        // TODO 3. Parse and filter: keep only login records. A record is a login when it carries
        // a uid AND either
        //   last_page_id == null     -> the user logged in right after opening the app, or
        //   last_page_id == "login"  -> the user browsed some pages first and then logged in.
        SingleOutputStreamOperator<JSONObject> transformAndFilterDataStream = sourcePageDataStream.flatMap(new FlatMapFunction<String, JSONObject>() {
            @Override
            public void flatMap(String value, Collector<JSONObject> out) throws Exception {
                JSONObject jsonObject;
                try {
                    jsonObject = JSON.parseObject(value);
                } catch (Exception e) {
                    // A malformed record must not kill the whole job; drop it.
                    System.err.println("Discarding malformed page log record: " + value);
                    return;
                }
                // Guard against records missing the "common"/"page" sections (NPE otherwise).
                JSONObject common = jsonObject.getJSONObject("common");
                JSONObject page = jsonObject.getJSONObject("page");
                if (common == null || page == null) {
                    return;
                }
                String uid = common.getString("uid");
                String lastPageId = page.getString("last_page_id");
                if (uid != null && (lastPageId == null || lastPageId.equals("login"))) {
                    out.collect(jsonObject);
                }
            }
        });

        // TODO 4. Assign event-time timestamps with a 2-second bounded-out-of-orderness watermark.
        SingleOutputStreamOperator<JSONObject> waterMarkStream = transformAndFilterDataStream.assignTimestampsAndWatermarks(
                WatermarkStrategy.<JSONObject>forBoundedOutOfOrderness(Duration.ofSeconds(2))
                        .withTimestampAssigner(new SerializableTimestampAssigner<JSONObject>() {
                            @Override
                            public long extractTimestamp(JSONObject element, long recordTimestamp) {
                                // Assumes every page log emitted upstream carries a top-level "ts".
                                return element.getLong("ts");
                            }
                        }));

        // TODO 5. Key by uid so per-user login state can be kept.
        KeyedStream<JSONObject, String> keyedStream = waterMarkStream.keyBy(new KeySelector<JSONObject, String>() {
            @Override
            public String getKey(JSONObject value) throws Exception {
                return value.getJSONObject("common").getString("uid");
            }
        });

        // TODO 6. Use keyed state (last login date) to flag today's unique users and back users.
        SingleOutputStreamOperator<UserLoginBean> flatMapDataStream = keyedStream.flatMap(new RichFlatMapFunction<JSONObject, UserLoginBean>() {

            // Last login date ("yyyy-MM-dd") of this uid. Deliberately never expires, so that
            // users absent for a long time can still be recognized as back users.
            private ValueState<String> lastVisitData;

            @Override
            public void open(Configuration parameters) throws Exception {
                lastVisitData = getRuntimeContext().getState(new ValueStateDescriptor<String>("last_visit_data", String.class));
            }

            @Override
            public void flatMap(JSONObject value, Collector<UserLoginBean> out) throws Exception {
                // Event time carried by the record, and its calendar date.
                Long curTs = value.getLong("ts");
                String curDate = DateFormatUtil.toDate(curTs);
                // Last login date recorded in state (null on the very first login).
                String stateDate = lastVisitData.value();

                long curNum = 0L;   // 1 when this is the uid's first login today
                long backNum = 0L;  // 1 when the uid additionally qualifies as a back user

                if (stateDate == null) {
                    // First login ever seen for this uid: unique today, but by definition
                    // not a back user.
                    curNum = 1L;
                    lastVisitData.update(curDate);
                } else if (!curDate.equals(stateDate)) {
                    // First login of a new day for this uid.
                    curNum = 1L;
                    if (DateFormatUtil.toTs(curDate, false) - DateFormatUtil.toTs(stateDate, false) >= BACK_USER_GAP_MS) {
                        backNum = 1L;
                    }
                    lastVisitData.update(curDate);
                }

                // Only emit when the record contributes to at least one metric.
                if (curNum != 0L) {
                    out.collect(new UserLoginBean("", "", backNum, curNum, curTs));
                }
            }
        });

        // TODO 7. 10-second tumbling windows over the whole stream, summing both counts.
        // NOTE(review): watermarks are generated above but this window is processing-time based,
        // which makes the watermark stage inert — switch to TumblingEventTimeWindows if
        // event-time semantics are required; confirm against the project spec.
        SingleOutputStreamOperator<UserLoginBean> reduceDataStream = flatMapDataStream
                .windowAll(TumblingProcessingTimeWindows.of(Time.seconds(10)))
                .reduce(new ReduceFunction<UserLoginBean>() {
                    @Override
                    public UserLoginBean reduce(UserLoginBean value1, UserLoginBean value2) throws Exception {
                        value1.setUuCt(value1.getUuCt() + value2.getUuCt());
                        value1.setBackCt(value1.getBackCt() + value2.getBackCt());
                        return value1;
                    }
                }, new AllWindowFunction<UserLoginBean, UserLoginBean, TimeWindow>() {
                    @Override
                    public void apply(TimeWindow window, Iterable<UserLoginBean> values, Collector<UserLoginBean> out) throws Exception {
                        // reduce() guarantees exactly one pre-aggregated element per fired window.
                        UserLoginBean next = values.iterator().next();

                        // Stamp the window bounds and the emission time onto the result row.
                        next.setStt(DateFormatUtil.toYmdHms(window.getStart()));
                        next.setEdt(DateFormatUtil.toYmdHms(window.getEnd()));
                        next.setTs(System.currentTimeMillis());

                        out.collect(next);
                    }
                });
        reduceDataStream.print(">>>>>>>>>");

        // TODO 8. Sink the aggregated rows to ClickHouse.
        reduceDataStream.addSink(ClickHouseUtil.getJdbcSink("insert into dws_user_user_login_window values(?,?,?,?,?)"));

        // TODO 9. Submit the job.
        env.execute("DwsUserUserLoginWindow");
    }
}
