package com.atguigu.gmallrealtime.app.dws;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.alibaba.fastjson.PropertyNamingStrategy;
import com.alibaba.fastjson.serializer.SerializeConfig;
import com.atguigu.gmallrealtime.bean.UserLoginBean;
import com.atguigu.gmallrealtime.common.Constant;
import com.atguigu.gmallrealtime.util.DateFormatUtil;
import com.atguigu.gmallrealtime.util.DorisUtil;
import com.atguigu.gmallrealtime.util.MyKafkaUtil;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.*;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.runtime.state.hashmap.HashMapStateBackend;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.windowing.ProcessAllWindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;
import org.apache.hadoop.yarn.webapp.hamlet2.Hamlet;

import java.util.Date;

/**
 * @author yhm
 * @create 2023-10-09 15:09
 */
/**
 * DWS job: user-login window summary.
 *
 * <p>Reads page-log records from the dwd_traffic_page Kafka topic, keeps only each
 * session's first login event per user, uses keyed state to classify the user as a
 * unique (daily-active) visitor and/or a returning ("back") user, aggregates the two
 * counts in 10-second tumbling event-time windows, and writes the result to the
 * Doris table dws_user_user_login_window as snake_case JSON.
 */
public class DwsUserUserLoginWindow {
    public static void main(String[] args) throws Exception {
        // TODO 1 Create the Flink streaming environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(4);

        // TODO 2 Checkpointing and state backend
        // 2.1 Exactly-once checkpoints every 3 seconds
        env.enableCheckpointing(3000L, CheckpointingMode.EXACTLY_ONCE);

        // 2.2 Fail a checkpoint if it takes longer than 60s
        env.getCheckpointConfig().setCheckpointTimeout(60000L);
        // 2.3 Retain externalized checkpoints after the job is cancelled
        env.getCheckpointConfig().setExternalizedCheckpointCleanup(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);
        // 2.4 Minimum pause between the end of one checkpoint and the start of the next
        env.getCheckpointConfig().setMinPauseBetweenCheckpoints(2000L);
        // 2.5 Restart strategy: tolerate at most 3 failures per 30 days, 3s delay between restarts
        // env.setRestartStrategy(RestartStrategies.fixedDelayRestart(3,3000L));
        env.setRestartStrategy(RestartStrategies.failureRateRestart(3, Time.days(30), Time.seconds(3)));

        env.setStateBackend(new HashMapStateBackend());
        env.getCheckpointConfig().setCheckpointStorage("hdfs://hadoop102:8020/gmall/ck");

        System.setProperty("HADOOP_USER_NAME","atguigu");

        // TODO 3 Read the dwd_traffic_page topic from Kafka
        String groupId = "dws_user_user_login_window";
        KafkaSource<String> kafkaSource = MyKafkaUtil.getKafkaSource(Constant.TOPIC_DWD_TRAFFIC_PAGE, groupId);
        DataStreamSource<String> pageSource = env.fromSource(kafkaSource, WatermarkStrategy.noWatermarks(), "page_source");

        // TODO 4 Parse and filter: keep records with a non-null uid that are the first
        // page of a session (last_page_id == null) or arrived straight from the login page.
        SingleOutputStreamOperator<JSONObject> jsonStream = pageSource.flatMap(new FlatMapFunction<String, JSONObject>() {
            @Override
            public void flatMap(String value, Collector<JSONObject> out) throws Exception {
                JSONObject jsonObject = JSON.parseObject(value);
                String uid = jsonObject.getJSONObject("common").getString("uid");
                String lastPageId = jsonObject.getJSONObject("page").getString("last_page_id");
                if (uid != null && (lastPageId == null || "login".equals(lastPageId))) {
                    out.collect(jsonObject);
                }
            }
        });
//        jsonStream.print();

        // TODO 5 Key by uid
        // Flink keyBy keys must never be null — guaranteed by the uid filter above.
        KeyedStream<JSONObject, String> keyedStream = jsonStream.keyBy(new KeySelector<JSONObject, String>() {
            @Override
            public String getKey(JSONObject value) throws Exception {
                return value.getJSONObject("common").getString("uid");
            }
        });

        // TODO 6 Keyed state: decide whether this login makes the user a unique visitor
        // and/or a returning user for the day, and convert to the summary bean.
        // NOTE(review): lastLoginDtState has no TTL, so it grows with the number of
        // distinct uids ever seen — confirm this is acceptable for the expected key space.
        SingleOutputStreamOperator<UserLoginBean> beanStream = keyedStream.flatMap(new RichFlatMapFunction<JSONObject, UserLoginBean>() {
            // Last login date ("yyyy-MM-dd") recorded for this uid.
            ValueState<String> lastLoginDtState = null;

            @Override
            public void open(Configuration parameters) throws Exception {
                lastLoginDtState = getRuntimeContext().getState(new ValueStateDescriptor<String>("last_login_dt", String.class));
            }

            @Override
            public void flatMap(JSONObject value, Collector<UserLoginBean> out) throws Exception {
                Long ts = value.getLong("ts");
                String curDt = DateFormatUtil.toDate(ts);
                String dorisDate = DateFormatUtil.toDorisDate(ts);
                String lastLoginDt = lastLoginDtState.value();
                // Returning ("back") user count contributed by this event (0 or 1).
                long backCt = 0L;
                // Unique visitor count contributed by this event (0 or 1).
                long uuCt = 0L;

                if (lastLoginDt == null) {
                    // No state yet: first login ever seen for this uid.
                    uuCt = 1L;
                    lastLoginDtState.update(curDt);
                } else {
                    long lastLoginTs = DateFormatUtil.toTs(lastLoginDt);
                    if (ts - lastLoginTs >= Constant.ONE_DAY * 1000) {
                        // First login of a new day for this uid.
                        uuCt = 1L;
                        lastLoginDtState.update(curDt);
                        // NOTE(review): a gap of >= 8 days marks a returning user —
                        // confirm the threshold against the metric definition (often "> 7 days").
                        if (ts - lastLoginTs >= 8 * Constant.ONE_DAY * 1000) {
                            backCt = 1L;
                        }
                    }
                }

                // Only emit events that actually contribute to either count.
                if (backCt != 0L || uuCt != 0L) {
                    out.collect(new UserLoginBean("", "", dorisDate, backCt, uuCt, ts));
                }
            }
        });

//        beanStream.print();

        // TODO 7 Assign watermarks (event time = bean ts)
        // TODO 8 10s tumbling event-time windows (global, non-keyed) with 5s allowed lateness
        // TODO 9 Incremental sum of both counts, then stamp window start/end on the result
        SingleOutputStreamOperator<UserLoginBean> reduceStream = beanStream.assignTimestampsAndWatermarks(WatermarkStrategy.<UserLoginBean>forMonotonousTimestamps().withTimestampAssigner(new SerializableTimestampAssigner<UserLoginBean>() {
            @Override
            public long extractTimestamp(UserLoginBean element, long recordTimestamp) {
                return element.getTs();
            }
        }))
                .windowAll(TumblingEventTimeWindows.of(org.apache.flink.streaming.api.windowing.time.Time.seconds(10L))).allowedLateness(org.apache.flink.streaming.api.windowing.time.Time.seconds(5L))
                .reduce(new ReduceFunction<UserLoginBean>() {
                    @Override
                    public UserLoginBean reduce(UserLoginBean value1, UserLoginBean value2) throws Exception {
                        value1.setBackCt(value1.getBackCt() + value2.getBackCt());
                        value1.setUuCt(value1.getUuCt() + value2.getUuCt());
                        return value1;
                    }
                }, new ProcessAllWindowFunction<UserLoginBean, UserLoginBean, TimeWindow>() {
                    @Override
                    public void process(Context context, Iterable<UserLoginBean> elements, Collector<UserLoginBean> out) throws Exception {
                        TimeWindow window = context.window();
                        String stt = DateFormatUtil.toYmdHms(window.getStart());
                        String edt = DateFormatUtil.toYmdHms(window.getEnd());
                        for (UserLoginBean element : elements) {
                            element.setStt(stt);
                            element.setEdt(edt);
                            out.collect(element);
                        }
                    }
                });


        // TODO 10 Serialize to snake_case JSON and write to Doris.
        // SerializeConfig is built once per task in open() instead of once per record.
        reduceStream
                .map(new RichMapFunction<UserLoginBean, String>() {
                    private transient SerializeConfig config;

                    @Override
                    public void open(Configuration parameters) throws Exception {
                        config = new SerializeConfig();
                        // Doris column names are snake_case; bean fields are camelCase.
                        config.setPropertyNamingStrategy(PropertyNamingStrategy.SnakeCase);
                    }

                    @Override
                    public String map(UserLoginBean value) throws Exception {
                        return JSON.toJSONString(value, config);
                    }
                })
                .sinkTo(DorisUtil.getDorisSink("dws_user_user_login_window"));

        // TODO 11 Submit the job (named consistently with the consumer group / sink table)
        env.execute("dws_user_user_login_window");
    }
}
