package cn.kgc.gmall.app.dwm;

import cn.kgc.gmall.utils.MyKafkaUtils;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import org.apache.flink.api.common.functions.RichFilterFunction;
import org.apache.flink.api.common.state.StateTtlConfig;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.runtime.state.filesystem.FsStateBackend;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

import java.text.SimpleDateFormat;
import java.time.Instant;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;

/**
 * Unique-visitor (UV) computation for the DWM layer.
 * <p>
 * Pipeline:
 * <ol>
 *   <li>Consume page-log records from the Kafka topic {@code dwd_topic_page}.</li>
 *   <li>Key the stream by device id ({@code mid}).</li>
 *   <li>Filter out records that are not a device's first visit of the day:
 *       in-session page jumps (records carrying {@code last_page_id}) and
 *       same-day repeat visits.</li>
 * </ol>
 * Surviving records are written to the Kafka topic {@code dwm_unique_visit}.
 */
public class UniqueVisitApp {
    public static void main(String[] args) throws Exception {
        // 1. Execution environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(4);
        // 2. Checkpointing: exactly-once every 5s, 60s timeout, and retain
        //    externalized checkpoints on cancellation so the job can be restored.
        env.enableCheckpointing(5 * 1000L, CheckpointingMode.EXACTLY_ONCE);
        env.getCheckpointConfig().setCheckpointTimeout(60000L);
        env.getCheckpointConfig()
                .enableExternalizedCheckpoints(
                        CheckpointConfig
                                .ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION
                );
        env.setStateBackend(new FsStateBackend("hdfs://hadoop102:8020/gmall/flink/checkpoint"));
        System.setProperty("HADOOP_USER_NAME","atkgc");

        // Source topic and consumer group
        String topic = "dwd_topic_page";
        String group = "dwd_page_group";
        DataStreamSource<String> pageDS = env.addSource(MyKafkaUtils.getKafkaConsumerSource(topic, group));
        // Debug output — requires the nginx log server, the data-generator jar,
        // and BaseLogApp + UniqueVisitApp to be running.
        //pageDS.print();

        // Parse each raw record into a JSONObject
        SingleOutputStreamOperator<JSONObject> map = pageDS.map(JSON::parseObject);
        // Key by mid: each key represents one device
        KeyedStream<JSONObject, String> keyBy = map.keyBy(x -> x.getJSONObject("common").getString("mid"));

        // Keep only each device's first visit of the day
        SingleOutputStreamOperator<JSONObject> filter = keyBy.filter(new RichFilterFunction<JSONObject>() {
            // Day-granularity formatter, created per parallel subtask in open().
            // DateTimeFormatter is immutable and thread-safe, unlike SimpleDateFormat.
            private transient DateTimeFormatter dayFormatter;
            // Keyed state: the last visit date ("yyyy-MM-dd") seen for this device
            private transient ValueState<String> lastVisitDateState;


            @Override
            public void open(Configuration parameters) throws Exception {
                super.open(parameters);
                dayFormatter = DateTimeFormatter.ofPattern("yyyy-MM-dd").withZone(ZoneId.systemDefault());
                // State name "time_state" is kept unchanged for savepoint compatibility.
                ValueStateDescriptor<String> timeStateDescriptor = new ValueStateDescriptor<>("time_state", String.class);
                // Expire the entry after one day so state only tracks "visited today";
                // OnCreateAndWrite restarts the TTL clock on every state write.
                StateTtlConfig stateTtlConfig = StateTtlConfig
                        .newBuilder(Time.days(1L))
                        .setUpdateType(StateTtlConfig.UpdateType.OnCreateAndWrite)
                        .build();
                // Activate TTL on the state variable
                timeStateDescriptor.enableTimeToLive(stateTtlConfig);

                lastVisitDateState = getRuntimeContext().getState(timeStateDescriptor);
            }

            @Override
            public boolean filter(JSONObject jsonObj) throws Exception {
                // 1. A non-empty last_page_id means the user navigated here from
                //    another page in the same session — not a first visit.
                String lastPageId = jsonObj.getJSONObject("page").getString("last_page_id");
                if (lastPageId != null && lastPageId.length() > 0) {
                    return false;
                }
                // NOTE(review): assumes "ts" (epoch millis) is always present; a missing
                // ts throws NPE here, same as the original code — confirm upstream.
                String visitDate = dayFormatter.format(Instant.ofEpochMilli(jsonObj.getLong("ts")));
                // 2. Read state once; null/empty state can never equal a date string,
                //    so a single null-safe equals replaces the original three reads.
                String lastVisitDate = lastVisitDateState.value();
                if (visitDate.equals(lastVisitDate)) {
                    // Already counted this device today
                    return false;
                }
                // First visit of the day: remember the date and keep the record
                lastVisitDateState.update(visitDate);
                return true;
            }
        })
                // Stable operator uid so savepoints keep matching this operator's state
                .uid("uvFilter");


        // Emit the UV stream to the DWM layer topic dwm_unique_visit
        filter
                // Serialize back to a JSON string
                .map(JSON::toString)
                // Write to Kafka
                .addSink(MyKafkaUtils.getKafkaProducerSink("dwm_unique_visit"));
        env.execute();
    }
}
