package com.atguigu.edu.app.dws;

import bean.UserUserBack;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import org.apache.commons.lang3.StringUtils;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.runtime.state.hashmap.HashMapStateBackend;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.AllWindowedStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.KeyedProcessFunction;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.streaming.api.functions.windowing.AllWindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;
import util.DateFormatUtil;
import util.MyClickhouseUtil;
import util.MyKafkaUtil;

import java.time.Duration;

/**
 * DWS job: counts "returning" (win-back) users per 10-second tumbling event-time window
 * and writes the aggregate to ClickHouse.
 *
 * Pipeline: Kafka start-log topic -> parse JSON -> keep non-first-launch events with a
 * non-empty uid -> event-time watermarks -> key by uid -> keyed state detects a login
 * gap of >= 8 days (returning user) -> windowAll + reduce -> ClickHouse sink.
 */
public class DwsUserUserBackWindow {
    public static void main(String[] args) throws Exception {
        // 1. Environment setup
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(4);
        // 2. Checkpoint settings (disabled for local development)
       /* env.enableCheckpointing(5000L, CheckpointingMode.EXACTLY_ONCE);
        env.getCheckpointConfig().setCheckpointTimeout(60000L);
        env.getCheckpointConfig().enableExternalizedCheckpoints(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);
        env.getCheckpointConfig().setMinPauseBetweenCheckpoints(2000L);
        env.setRestartStrategy(RestartStrategies.failureRateRestart(3, org.apache.flink.api.common.time.Time.days(30), org.apache.flink.api.common.time.Time.seconds(3)));
        env.setStateBackend(new HashMapStateBackend());
        env.getCheckpointConfig().setCheckpointStorage("hdfs://39.101.79.248:8020//edu_realtime/checkpoint");
        System.setProperty("HADOOP_USER_NAME", "atguigu");*/
        // 3. Read the start-log topic from Kafka
        String topic = "dwd_traffic_start_log";
        String groupId = "dwd_user_user_back_group";
        DataStreamSource<String> kafkaDS = env.addSource(MyKafkaUtil.getKafkaConsumer(topic, groupId));
        // 4. Convert each record: JSON string -> JSONObject
        SingleOutputStreamOperator<JSONObject> jsonObjDS = kafkaDS.map(new MapFunction<String, JSONObject>() {
            @Override
            public JSONObject map(String jsonStr) throws Exception {
                return JSON.parseObject(jsonStr);
            }
        });
        // 5. Keep only login events: one start record == one login; drop first-launch
        //    records (first_open == "1") and records without a uid.
        SingleOutputStreamOperator<JSONObject> filterDS = jsonObjDS.filter(new FilterFunction<JSONObject>() {
            @Override
            public boolean filter(JSONObject jsonObj) throws Exception {
                // first_open: "1" = first app launch, "0" = subsequent launch
                String firstOpen = jsonObj.getJSONObject("start").getString("first_open");
                String uid = jsonObj.getJSONObject("common").getString("uid");
                return "0".equals(firstOpen) && StringUtils.isNotEmpty(uid);
            }
        });
        // 6. Assign watermarks and extract the event-time timestamp.
        //    NOTE: must chain off filterDS — assigning on jsonObjDS would silently
        //    bypass the filter above and let first-launch / uid-less records through.
        SingleOutputStreamOperator<JSONObject> withWatermarkDS = filterDS.assignTimestampsAndWatermarks(WatermarkStrategy.<JSONObject>forBoundedOutOfOrderness(Duration.ofSeconds(3))
                .withTimestampAssigner(new SerializableTimestampAssigner<JSONObject>() {
                    @Override
                    public long extractTimestamp(JSONObject jsonObj, long recordTimestamp) {
                        return jsonObj.getLong("ts");
                    }
                }));
        // 7. Key the stream by uid so per-user state can be maintained
        KeyedStream<JSONObject, String> keyedDS = withWatermarkDS.keyBy(new KeySelector<JSONObject, String>() {
            @Override
            public String getKey(JSONObject jsonObj) throws Exception {
                return jsonObj.getJSONObject("common").getString("uid");
            }
        });
        // 8. Detect returning users with keyed state and map to the output bean
        SingleOutputStreamOperator<UserUserBack> userBackBeanDS = keyedDS.process(new KeyedProcessFunction<String, JSONObject, UserUserBack>() {
            // Timestamp (ms) of this user's previous login; null if never seen
            private ValueState<Long> lastLoginDate;

            @Override
            public void open(Configuration parameters) throws Exception {
                ValueStateDescriptor<Long> valueStateDescriptor = new ValueStateDescriptor<>("lastLoginDate", Long.class);
                lastLoginDate = getRuntimeContext().getState(valueStateDescriptor);
            }

            @Override
            public void processElement(JSONObject jsonObj, KeyedProcessFunction<String, JSONObject, UserUserBack>.Context ctx, Collector<UserUserBack> out) throws Exception {
                // If no previous login is recorded, this user cannot be a returning
                // user yet — just remember the timestamp. Otherwise compute the gap
                // in whole days: (curTs - lastTs) / 1000 / 60 / 60 / 24; a gap of
                // >= 8 days marks a returning (win-back) user.
                Long lastTs = lastLoginDate.value();
                Long curTs = jsonObj.getLong("ts");
                UserUserBack userBackBean = new UserUserBack(
                        "",
                        "",
                        0L,
                        0L
                );
                if (lastTs == null) {
                    lastLoginDate.update(curTs);
                } else {
                    if ((curTs - lastTs) / 1000 / 60 / 60 / 24 >= 8) {
                        userBackBean.setBack(1L);
                    }
                    lastLoginDate.update(curTs);
                }
                out.collect(userBackBean);
            }
        });
        // 9. Tumbling 10-second event-time window over the whole stream
        AllWindowedStream<UserUserBack, TimeWindow> windowDS = userBackBeanDS.windowAll(TumblingEventTimeWindows.of(Time.seconds(10)));
        // 10. Aggregate: sum the back flags, then stamp window bounds and write time
        SingleOutputStreamOperator<UserUserBack> reduceDS = windowDS.reduce(
                new ReduceFunction<UserUserBack>() {
                    @Override
                    public UserUserBack reduce(UserUserBack value1, UserUserBack value2) throws Exception {
                        value1.setBack(value1.getBack() + value2.getBack());
                        return value1;
                    }
                },
                new AllWindowFunction<UserUserBack, UserUserBack, TimeWindow>() {
                    @Override
                    public void apply(TimeWindow window, Iterable<UserUserBack> input, Collector<UserUserBack> out) throws Exception {
                        // reduce() leaves exactly one element per window; fill in the
                        // window start/end and the processing-time watermark column
                        for (UserUserBack userUserBack : input) {
                            userUserBack.setStt(DateFormatUtil.toYmdHms(window.getStart()));
                            userUserBack.setEdt(DateFormatUtil.toYmdHms(window.getEnd()));
                            userUserBack.setTs(System.currentTimeMillis());
                            out.collect(userUserBack);
                        }
                    }
                }
        );
        // 11. Sink the window aggregates to ClickHouse
        reduceDS.print("4");
        reduceDS.addSink(MyClickhouseUtil.getSinkFunction("insert into dws_user_user_back_window values(?,?,?,?)"));
        env.execute();
    }
}
