package com.zhu.app.dwd;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONAware;
import com.alibaba.fastjson.JSONObject;
import com.zhu.utils.DataFormatUtil;
import com.zhu.utils.ZhuKafkaUtil;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.RichFilterFunction;
import org.apache.flink.api.common.state.StateTtlConfig;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.util.Collector;

/**
 * DWD-layer unique visitor (UV) detail job.
 *
 * Data flow: web/app -> Nginx -> ... -> Kafka (page log topic) -> this job -> Kafka (UV detail topic).
 */
public class DWDTrafficUniqueVisitorDetailApp {

    /**
     * Parses raw page-log JSON and emits only first-page visits, i.e. records whose
     * {@code page.last_page_id} is absent. Malformed records are printed and skipped
     * so one bad message cannot kill the job.
     */
    private static class FirstPageVisitFlatMap implements FlatMapFunction<String, JSONObject> {
        @Override
        public void flatMap(String value, Collector<JSONObject> collector) throws Exception {
            try {
                JSONObject record = JSON.parseObject(value);
                // A session's entry page has no previous page id; only those can start a UV count.
                String lastPageId = record.getJSONObject("page").getString("last_page_id");
                if (lastPageId == null) {
                    collector.collect(record);
                }
            } catch (Exception exception) {
                // Best-effort handling of dirty data: dump the raw payload and continue.
                System.out.println(value);
                exception.printStackTrace();
            }
        }
    }

    /**
     * Keeps only the first visit per device (keyed by {@code mid}) per calendar day.
     * The last seen visit date is held in keyed state with a one-day TTL so state for
     * inactive devices is eventually dropped.
     */
    private static class DailyFirstVisitFilter extends RichFilterFunction<JSONObject> {

        // Last visit date string for the current key; null until the device's first visit.
        private ValueState<String> lastVisitState;

        @Override
        public void open(Configuration parameters) throws Exception {
            ValueStateDescriptor<String> descriptor =
                    new ValueStateDescriptor<>("lastVisitState", String.class);

            // Refresh the one-day TTL on create and on every write, so the entry
            // survives exactly as long as the current day's de-duplication needs it.
            StateTtlConfig ttlConfig = new StateTtlConfig.Builder(Time.days(1))
                    .setUpdateType(StateTtlConfig.UpdateType.OnCreateAndWrite)
                    .build();
            descriptor.enableTimeToLive(ttlConfig);

            lastVisitState = getRuntimeContext().getState(descriptor);
        }

        @Override
        public boolean filter(JSONObject jsonObject) throws Exception {
            // Compare the stored visit date with the date derived from the event timestamp.
            String lastDate = lastVisitState.value();
            Long ts = jsonObject.getLong("ts");
            String currentDate = DataFormatUtil.toDate(ts);

            if (lastDate != null && lastDate.equals(currentDate)) {
                // Already counted this device today: drop.
                return false;
            }
            // First visit of a new day (or ever) for this device: remember the day and keep it.
            lastVisitState.update(currentDate);
            return true;
        }
    }

    public static void main(String[] args) throws Exception {
        // todo 1. execution environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1); // test only; in production match the Kafka topic partition count (4)

        // Checkpointing / restart strategy — intentionally disabled for local testing:
        /*
        env.enableCheckpointing(5 * 60000L, CheckpointingMode.EXACTLY_ONCE); // exactly-once

        // state backend
        env.setStateBackend(new HashMapStateBackend());
        env.getCheckpointConfig().setCheckpointStorage(ClusterParametersConfig.HDFS_CHECKPOINT_FILE_DIR); // checkpoints on HDFS
        System.setProperty("HADOOP_USER_NAME", "zhu");
        env.getCheckpointConfig().setCheckpointTimeout(10 * 60000L); // timeout
        env.getCheckpointConfig().setMaxConcurrentCheckpoints(2); // max concurrent checkpoints
        env.setRestartStrategy(RestartStrategies.fixedDelayRestart(3, 5 * 1000L)); // restart strategy
         */

        // todo 2. consume the DWD page log topic from Kafka
        String topic = "dwd_traffic_page_log";
        String groupId = "Unique_Visitor_Detail_zhu_2023";
        DataStreamSource<String> kafkaDStream =
                env.addSource(ZhuKafkaUtil.getFlinkKafkaConsumer(topic, groupId));

        // todo 3. keep only first-page visits (last_page_id == null), parsed to JSONObject
        SingleOutputStreamOperator<JSONObject> firstVisitStream =
                kafkaDStream.flatMap(new FirstPageVisitFlatMap());

        // todo 4. key by device id (mid)
        KeyedStream<JSONObject, String> keyedByMidStream =
                firstVisitStream.keyBy(json -> json.getJSONObject("common").getString("mid"));

        // todo 5. per-device daily de-duplication via keyed state
        SingleOutputStreamOperator<JSONObject> uniqueVisitorStream =
                keyedByMidStream.filter(new DailyFirstVisitFilter());

        // todo 6. sink the UV detail back to Kafka
        String targetTopic = "dwd_traffic_unique_visitor_detail";
        uniqueVisitorStream.print(">>>>>");
        uniqueVisitorStream.map(JSONAware::toJSONString).addSink(ZhuKafkaUtil.getFlinkKafkaProducer(targetTopic));

        // todo 7. execute
        env.execute("uv");

    }
}
