package com.sinozo.data.app.dwd;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONAware;
import com.alibaba.fastjson.JSONObject;
import com.sinozo.data.bean.key.PageKeyByBean;
import com.sinozo.data.common.ConfigConstant;
import com.sinozo.data.common.DataConfig;
import com.sinozo.data.utils.DateFormatUtil;
import com.sinozo.data.utils.MyKafkaUtil;
import com.sinozo.data.utils.PathUtil;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.RichFilterFunction;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.api.common.state.StateTtlConfig;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.contrib.streaming.state.EmbeddedRocksDBStateBackend;
import org.apache.flink.contrib.streaming.state.RocksDBStateBackend;
import org.apache.flink.runtime.state.storage.FileSystemCheckpointStorage;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.util.Collector;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.concurrent.TimeUnit;

/**
 * @Description: 流量域独立访客事务事实表
 * <p>
 * 输出UV计数
 * </p>
 * @Author: zhaoyunbao
 * @CreateDate: 2023/5/5 14:51
 * @Version: 1.0
 */
public class DwdTrafficUniqueVisitorDetail {

    /**
     * Declared as a static class field instead of a local variable in {@code main}:
     * the anonymous operator functions below reference the logger, and a local
     * variable would be captured into their serialized closures. Flink serializes
     * operator instances at job submission, and Logger implementations are not
     * reliably {@code Serializable} — a static field is never part of the closure.
     */
    private static final Logger logger = LoggerFactory.getLogger(DwdTrafficUniqueVisitorDetail.class);

    public static void main(String[] args) {
        try {
            // 1. Build the streaming execution environment; parallelism matches the
            //    Kafka partition count so each subtask owns a partition.
            StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
            env.setParallelism(DataConfig.KAFKA_PARTITION_NUM);

            // 1.1 Enable exactly-once checkpointing.
            env.enableCheckpointing(DataConfig.FLINK_CHECKPOINT_INTERVAL, CheckpointingMode.EXACTLY_ONCE);
            env.getCheckpointConfig().setCheckpointTimeout(DataConfig.FLINK_CHECKPOINT_TIMEOUT);
            // NOTE(review): allowing 2 concurrent checkpoints together with
            // EXACTLY_ONCE is unusual (overlapping aligned checkpoints add barrier
            // pressure) — confirm this is intentional.
            env.getCheckpointConfig().setMaxConcurrentCheckpoints(2);

            // 1.2 Checkpoint pacing, retention on cancel, restart strategy, and
            //     RocksDB state backend with HDFS checkpoint storage.
            env.getCheckpointConfig().setMinPauseBetweenCheckpoints(3000L);
            env.getCheckpointConfig().setExternalizedCheckpointCleanup(
                    CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION
            );
            // At most 10 failures within a 3-day window, retrying every minute.
            env.setRestartStrategy(RestartStrategies.failureRateRestart(10, Time.of(3L, TimeUnit.DAYS), Time.of(1L, TimeUnit.MINUTES)));

            String hdfsPath = DataConfig.HDFS_SERVER;
            env.getCheckpointConfig().setCheckpointStorage(new FileSystemCheckpointStorage(PathUtil.getHdfsPath(hdfsPath, "dwdTrafficUniqueVisitorDetail")));
            env.setStateBackend(new EmbeddedRocksDBStateBackend());

            // 2. Source: page-log topic from Kafka.
            String topic = ConfigConstant.KAFKA_PAGE_LOG_TOPIC;
            String groupId = ConfigConstant.KAFKA_UNIQUE_VISITOR_DETAIL_GROUP_ID;

            DataStreamSource<String> kafkaDS = env.addSource(MyKafkaUtil.getFlinkKafkaConsumer(topic, groupId));

            // 3. Parse each raw record into a JSONObject. Malformed records are
            //    logged and dropped (no filtering on any "last page" field happens
            //    here — the dedup is done in step 4).
            SingleOutputStreamOperator<JSONObject> jsonObjDS = kafkaDS.flatMap(new FlatMapFunction<String, JSONObject>() {
                @Override
                public void flatMap(String value, Collector<JSONObject> out) throws Exception {
                    try {
                        JSONObject jsonObject = JSON.parseObject(value);
                        out.collect(jsonObject);
                    } catch (Exception e) {
                        logger.error("Error parsing", e);
                    }
                }
            });

            // 4. Deduplicate with keyed state: key by (device_id, code, sub_event)
            //    and emit only the first record per key per calendar day. The
            //    state holds the last-seen date and expires after 1 day.
            // NOTE(review): getJSONObject("common")/getJSONObject("page") will NPE
            // if either key is absent — confirm upstream guarantees both exist.
            SingleOutputStreamOperator<JSONObject> uvDS = jsonObjDS.keyBy(json -> {
                String deviceId = json.getJSONObject("common").getString("device_id");
                String code = json.getJSONObject("common").getString("code");
                String subEvent = json.getJSONObject("page").getString("sub_event");
                return PageKeyByBean.builder().deviceId(deviceId).code(code).subEvent(subEvent).build();
            }).filter(new RichFilterFunction<JSONObject>() {
                // Last date (yyyy-MM-dd, presumably — see DateFormatUtil) on which
                // this key was emitted.
                private ValueState<String> lastVisitState;

                @Override
                public void open(Configuration parameters) throws Exception {
                    ValueStateDescriptor<String> stateDescriptor = new ValueStateDescriptor<>("last-visit", String.class);
                    // TTL of 1 day, refreshed on every write, so state for idle
                    // keys is reclaimed and the daily dedup resets naturally.
                    StateTtlConfig ttlConfig = new StateTtlConfig.Builder((Time.days(1)))
                            .setUpdateType(StateTtlConfig.UpdateType.OnCreateAndWrite)
                            .build();
                    stateDescriptor.enableTimeToLive(ttlConfig);
                    lastVisitState = getRuntimeContext().getState(stateDescriptor);
                }

                @Override
                public boolean filter(JSONObject value) throws Exception {
                    // Compare the record's event date with the stored last-visit
                    // date; pass the record through only on a new day.
                    String lastDate = lastVisitState.value();
                    JSONObject page = value.getJSONObject("page");
                    Long ts = page.getLong("ts");
                    String curDate = DateFormatUtil.toDate(ts);
                    if (lastDate == null || !lastDate.equals(curDate)) {
                        lastVisitState.update(curDate);
                        return true;
                    } else {
                        return false;
                    }
                }
            });

            // 5. Sink the deduplicated (unique-visitor) stream back to Kafka.
            String targetTopic = ConfigConstant.KAFKA_UNIQUE_VISITOR_TOPIC;

            uvDS.map(JSONAware::toJSONString).addSink(MyKafkaUtil.getFlinkKafkaProducer(targetTopic));

            // 6. Submit the job.
            env.execute("DwdTrafficUniqueVisitorDetail");
        } catch (Exception e) {
            logger.error("exe DwdTrafficUniqueVisitorDetail error", e);
        }

    }

}
