package com.zhu.app.dws;


import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.zhu.bean.UserLoginBean;
import com.zhu.config.ClusterParametersConfig;
import com.zhu.config.KafkaTopicConfig;
import com.zhu.utils.ClickHouseUtil;
import com.zhu.utils.DataFormatUtil;
import com.zhu.utils.ZhuKafkaUtil;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.common.functions.RichReduceFunction;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.runtime.state.hashmap.HashMapStateBackend;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.*;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.KeyedProcessFunction;
import org.apache.flink.streaming.api.functions.windowing.AllWindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;

import java.time.Duration;
import java.time.LocalDate;
import java.time.temporal.ChronoUnit;

/**
 * DWS layer: unique-user (uuCt) and returning-user (backCt) login statistics window table.
 *
 * Filtering: a record counts as a login only when {@code common.uid} is present AND the
 * page is either a session entry ({@code last_page_id == null}) or the login page
 * ({@code last_page_id == "login"}).
 *
 * Per-user counting logic (keyed state holds the user's last login DATE):
 * (1) If the stored last login date is non-null:
 *     - If it differs from today's date: uuCt = 1, state is updated to today, and
 *       backCt = 1 when (today - lastLoginDate) >= 8 days, otherwise backCt = 0.
 *     - If it equals today's date: uuCt and backCt would both be 0, so the record
 *       cannot affect the aggregate and is dropped (not forwarded downstream).
 * (2) If the stored date is null (first login ever seen for this uid): uuCt = 1,
 *     backCt = 0, and the state is updated to today.
 */
public class DWSTrafficUserLoginApp {

    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment streamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment();
        streamExecutionEnvironment.setParallelism(4);  // aligned with the Kafka topic partition count

        // Checkpointing (disabled here; enable for production fault tolerance)
        /*
        streamExecutionEnvironment.enableCheckpointing(5 * 60000L, CheckpointingMode.EXACTLY_ONCE);
        streamExecutionEnvironment.setStateBackend(new HashMapStateBackend());
        streamExecutionEnvironment.getCheckpointConfig().setCheckpointStorage(ClusterParametersConfig.HDFS_CHECKPOINT_FILE_DIR);
        System.setProperty("HADOOP_USER_NAME","zhu");
        streamExecutionEnvironment.getCheckpointConfig().setCheckpointTimeout(10 * 600000L);
        streamExecutionEnvironment.getCheckpointConfig().setMaxConcurrentCheckpoints(2);
        streamExecutionEnvironment.setRestartStrategy(RestartStrategies.fixedDelayRestart(3, 5 * 1000L));
         */

        //todo read the DWD page-log topic from Kafka
        String topic = KafkaTopicConfig.KAFKA_DWD_PAGE_LOG_TOPIC;
        String groupId = "dws_user_user_login_window" + KafkaTopicConfig.KAFKA_GROUP_ID_LAST_NAME;
        DataStreamSource<String> kafkaPageDStream = streamExecutionEnvironment.addSource(ZhuKafkaUtil.getFlinkKafkaConsumer(topic, groupId));

        //todo filter for login records
        /*
        sample record:
        {"common":{"ar":"230000","uid":"10","os":"Android 10.0","ch":"oppo","is_new":"0","md":"vivo iqoo3",
        "mid":"mid_475408","vc":"v2.1.134","ba":"vivo"},
        "page":{"page_id":"comment","item":"13","during_time":13516,"item_type":"sku_id",
        "last_page_id":"good_spec","source_type":"recommend"},"ts":1680266306000}
        keep a record when:
        1. uid != null              -> produced by a logged-in user
        2. last_page_id == null     -> session-entry login, or
           last_page_id == "login"  -> explicit login-page login
         */
        SingleOutputStreamOperator<JSONObject> filterLoginUserDStream = kafkaPageDStream.flatMap(new FlatMapFunction<String, JSONObject>() {
            @Override
            public void flatMap(String logData, Collector<JSONObject> collector) throws Exception {
                JSONObject jsonObject = JSON.parseObject(logData);
                // Guard against dirty records: a missing "common"/"page" section
                // previously threw an NPE and failed the whole task.
                JSONObject common = jsonObject.getJSONObject("common");
                if (common == null) {
                    return;
                }
                String uid = common.getString("uid");
                if (uid != null) {
                    JSONObject page = jsonObject.getJSONObject("page");
                    if (page == null) {
                        return; // dirty record: page log without a "page" section
                    }
                    String lastPageId = page.getString("last_page_id");
                    if (lastPageId == null || "login".equals(lastPageId)) {
                        collector.collect(jsonObject);
                    }
                }
            }
        });

        //todo assign event-time watermarks (2s bounded out-of-orderness)
        SingleOutputStreamOperator<JSONObject> filterLoginUserWithWaterMarkDStream
                = filterLoginUserDStream.assignTimestampsAndWatermarks(WatermarkStrategy.<JSONObject>forBoundedOutOfOrderness(Duration.ofSeconds(2)
        ).withTimestampAssigner(new SerializableTimestampAssigner<JSONObject>() {
            @Override
            public long extractTimestamp(JSONObject jsonObject, long l) {
                // "ts" is the epoch-millis event time produced by the DWD layer
                return jsonObject.getLong("ts");
            }
        }));

        //todo key by uid so each user's logins are processed together
        KeyedStream<JSONObject, String> keyedByUidLoginDStream = filterLoginUserWithWaterMarkDStream.keyBy(jsonObject -> jsonObject.getJSONObject("common").getString("uid"));

        // TODO stateful dedup: derive per-record uuCt / backCt from the user's last login DATE
        SingleOutputStreamOperator<UserLoginBean> processUuAndBackUserDStream = keyedByUidLoginDStream.process(new KeyedProcessFunction<String, JSONObject, UserLoginBean>() {

            // Holds the user's last login DATE ("yyyy-MM-dd"). Deliberately no state
            // TTL: the returning-user rule needs login dates older than 8 days.
            private ValueState<String> userLastLoginState;

            @Override
            public void open(Configuration parameters) throws Exception {
                ValueStateDescriptor<String> loginStateDescriptor = new ValueStateDescriptor<String>("user-lastLogin-state", String.class);
                userLastLoginState = getRuntimeContext().getState(loginStateDescriptor);
            }

            @Override
            public void processElement(JSONObject jsonObject, KeyedProcessFunction<String, JSONObject, UserLoginBean>.Context context, Collector<UserLoginBean> collector) throws Exception {
                // BUGFIX: compare calendar DATES, not full "yyyy-MM-dd HH:mm:ss"
                // strings. With second granularity almost every event differed from
                // the stored value, so uuCt approximated the raw login-event count
                // instead of one per user per day, and the >= 8-day comparison on
                // full timestamps missed users returning on day 8 at an earlier hour.
                // NOTE(review): assumes DataFormatUtil.toYmdHms yields
                // "yyyy-MM-dd HH:mm:ss" (date in the first 10 chars) — confirm.
                String loginDate = DataFormatUtil.toYmdHms(jsonObject.getLong("ts")).substring(0, 10);
                String lastLoginDate = userLastLoginState.value();
                if (lastLoginDate != null && lastLoginDate.length() > 10) {
                    // tolerate legacy state entries that stored a full date-time
                    lastLoginDate = lastLoginDate.substring(0, 10);
                }

                // result holder: uuCt = unique user, backCt = returning user
                UserLoginBean userLoginBean = new UserLoginBean();
                if (lastLoginDate != null) {
                    if (!lastLoginDate.equals(loginDate)) {
                        // first login of a new day -> unique user for today
                        userLoginBean.setUuCt(1L);
                        // returning user: away for 8 or more days
                        long daysAway = ChronoUnit.DAYS.between(LocalDate.parse(lastLoginDate), LocalDate.parse(loginDate));
                        userLoginBean.setBackCt(daysAway >= 8 ? 1L : 0L);
                        userLastLoginState.update(loginDate);
                        collector.collect(userLoginBean);
                    }
                    // same-day repeat login: contributes nothing, drop the record
                } else {
                    // first login ever observed for this uid
                    userLoginBean.setUuCt(1L);
                    userLoginBean.setBackCt(0L);
                    userLastLoginState.update(loginDate);
                    collector.collect(userLoginBean);
                }
            }
        });

        //todo 10s tumbling event-time windows over all users, counts summed globally
        AllWindowedStream<UserLoginBean, TimeWindow> userLoginBeanAllWindowedStream = processUuAndBackUserDStream.windowAll(TumblingEventTimeWindows.of(Time.seconds(10)));

        SingleOutputStreamOperator<UserLoginBean> resultDStream = userLoginBeanAllWindowedStream.reduce(
                new ReduceFunction<UserLoginBean>() {
                    @Override
                    public UserLoginBean reduce(UserLoginBean value1, UserLoginBean value2) throws Exception {
                        // incrementally sum unique-user and returning-user counts
                        value1.setUuCt(value1.getUuCt() + value2.getUuCt());
                        value1.setBackCt(value1.getBackCt() + value2.getBackCt());
                        return value1;
                    }
                },
                new AllWindowFunction<UserLoginBean, UserLoginBean, TimeWindow>() {
                    @Override
                    public void apply(TimeWindow timeWindow, Iterable<UserLoginBean> iterable, Collector<UserLoginBean> collector) throws Exception {
                        // attach window bounds and a processing-time version stamp
                        String start = DataFormatUtil.toYmdHms(timeWindow.getStart());
                        String end = DataFormatUtil.toYmdHms(timeWindow.getEnd());
                        long ts = System.currentTimeMillis();
                        UserLoginBean loginBean = iterable.iterator().next();
                        loginBean.setStt(start);
                        loginBean.setEdt(end);
                        loginBean.setTs(ts);
                        collector.collect(loginBean);
                    }
                }
        );

        //todo sink to ClickHouse (5 columns: stt, edt, uuCt, backCt, ts)
        resultDStream.print(">>>>>");
        resultDStream.addSink(ClickHouseUtil.getClickHouseSinkFunction("insert into dws_user_user_login_window values(?,?,?,?,?)"));
        //todo execute job
        streamExecutionEnvironment.execute("DWSTrafficUserLoginApp");

    }
}
