package com.atguigu.app.dwm;

import cn.hutool.core.date.DateUtil;
import cn.hutool.json.JSONUtil;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.util.MyKafkaUtil;
import org.apache.flink.api.common.functions.RichFilterFunction;
import org.apache.flink.api.common.state.StateTtlConfig;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

import java.util.Date;

/**
 * Unique Visitor (UV) statistics: for each device id ("mid"), keeps only the first
 * session-entry page view of each calendar day and forwards it to the DWM layer.
 *
 * Data flow: web/app -> Nginx -> SpringBoot -> Kafka(ods) -> FlinkApp -> Kafka(dwd) -> FlinkApp -> Kafka(dwm)
 * Programs : mockLog -> Nginx -> Logger.sh  -> Kafka(ZK)  -> BaseLogApp -> kafka -> UniqueVisitApp -> Kafka
 **/
public class UniqueViewApp
{
    public static void main(String[] args) throws Exception
    {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        //1.1 Checkpoint & state-backend configuration (disabled for local testing)
        //env.setStateBackend(new FsStateBackend("hdfs://hadoop102:8020/gmall-flink-210325/ck"));
        //env.enableCheckpointing(5000L);
        //env.getCheckpointConfig().setCheckpointingMode(CheckpointingMode.EXACTLY_ONCE);
        //env.getCheckpointConfig().setCheckpointTimeout(10000L);
        //env.getCheckpointConfig().setMaxConcurrentCheckpoints(2);
        //env.getCheckpointConfig().setMinPauseBetweenCheckpoints(3000);

        //env.setRestartStrategy(RestartStrategies.fixedDelayRestart());

        String dwdTopic = "dwd_page_log";
        String dwmTopic = "dwm_unique_visit";
        // NOTE(review): this group id appears copy-pasted from BaseLogApp; a dedicated id
        // such as "unique_visit_app_210325" would be clearer. Left unchanged on purpose —
        // changing a consumer group id resets committed Kafka offsets.
        String groupId = "base_log_app_210325";

        DataStream<JSONObject> jsonStream = env
                // Consume the DWD page-log topic.
                .addSource(MyKafkaUtil.getKafkaConsumer(dwdTopic, groupId))
                // Parse raw JSON strings into JSONObject.
                .map(JSON::parseObject)
                // Key by device id so each device's visit state is local to its key.
                .keyBy(data -> data.getJSONObject("common").getString("mid"))
                // Keep only the first session-entry page view per device per day.
                .filter(new RichFilterFunction<JSONObject>()
                {
                    // Last date (yyyy-MM-dd) on which this device was counted as a visitor.
                    private ValueState<String> dateState;

                    @Override
                    public void open(Configuration parameters) throws Exception
                    {
                        ValueStateDescriptor<String> stateProperties =
                                new ValueStateDescriptor<>("state-date", String.class);
                        // Expire state after one day so yesterday's visit date cannot
                        // linger and consume state storage indefinitely.
                        StateTtlConfig ttlConfig = StateTtlConfig
                                .newBuilder(Time.days(1))
                                // Refresh the TTL whenever the state is created or written.
                                .setUpdateType(StateTtlConfig.UpdateType.OnCreateAndWrite)
                                .build();
                        stateProperties.enableTimeToLive(ttlConfig);
                        dateState = getRuntimeContext().getState(stateProperties);
                    }

                    @Override
                    public boolean filter(JSONObject value) throws Exception
                    {
                        JSONObject page = value.getJSONObject("page");
                        // Guard against malformed records missing the "page" section
                        // (the previous code would throw an NPE here and fail the job).
                        // A non-null last_page_id means this is not a session entry page,
                        // so it cannot be a new visit either.
                        if (page == null || page.getString("last_page_id") != null)
                        {
                            return false;
                        }
                        // Format the event timestamp once (previously computed twice).
                        String curDate = DateUtil.formatDate(new Date(value.getLong("ts")));
                        if (!curDate.equals(dateState.value()))
                        {
                            // First visit of this day for this device: remember the date
                            // and let the record through.
                            dateState.update(curDate);
                            return true;
                        }
                        return false;
                    }
                });

        // Debug print, then write the UV stream to the DWM Kafka topic.
        jsonStream.print("result");
        jsonStream.map(JSONUtil::toJsonStr).addSink(MyKafkaUtil.getKafkaProducer(dwmTopic));

        env.execute("UniqueViewApp");
    }
}
