package com.atliuzu.app.dwd.log;

import com.alibaba.fastjson.JSONAware;
import com.alibaba.fastjson.JSONObject;
import com.atliuzu.utils.DateFormatUtil;
import com.atliuzu.utils.MyKafkaUtil;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.state.StateTtlConfig;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.util.Collector;

/**
 * @Author : songyuan
 * @Description :
 * Date : 2022/8/19 13:44
 * @Version : 1.0
 */

//当日各来源 独立访客数

/**
 * DWD traffic layer: per-day unique-visitor (UV) detail job.
 *
 * <p>Pipeline: read page-log records from Kafka topic {@code dwd_traffic_page_log},
 * keep only session-entry pages ({@code last_page_id == null}), key by device id
 * ({@code common.mid}), and use keyed state holding the last visit date to emit
 * each device's FIRST visit of the day. Results are written back to Kafka topic
 * {@code dwd_traffic_unique_visitor_detail}.
 */
public class DwdTrafficUniqueVisitorDetail {

    public static void main(String[] args) throws Exception {

        // 1. Create the stream execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // 2. State backend / checkpointing (disabled during local development).
//        env.enableCheckpointing(3000L, CheckpointingMode.EXACTLY_ONCE);
//        env.getCheckpointConfig().setCheckpointTimeout(30 * 1000L);
//        env.getCheckpointConfig().setMinPauseBetweenCheckpoints(3000L);
//        env.getCheckpointConfig().enableExternalizedCheckpoints(
//                CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION
//        );
//        env.setRestartStrategy(RestartStrategies.failureRateRestart(
//                3, Time.days(1), Time.minutes(1)
//        ));
//        env.setStateBackend(new HashMapStateBackend());
//        env.getCheckpointConfig().setCheckpointStorage(
//                "hdfs://hadoop102:8020/ck"
//        );
//        System.setProperty("HADOOP_USER_NAME", "atguigu");

        // 3. Read page-log data and keep only session-entry records (last_page_id == null).
        String topic = "dwd_traffic_page_log";
        // BUGFIX: the group id previously read "dwd_traffic_user_jump_detail" (copy-pasted
        // from the user-jump job). Sharing a consumer group across two jobs makes them
        // split partitions and clobber each other's offsets; use this job's own group.
        String groupId = "dwd_traffic_unique_visitor_detail";
        DataStreamSource<String> kafkaDS = env.addSource(MyKafkaUtil.getFlinkKafkaConsumer(topic, groupId));

        SingleOutputStreamOperator<JSONObject> loginDS = kafkaDS.process(new ProcessFunction<String, JSONObject>() {
            @Override
            public void processElement(String value, Context ctx, Collector<JSONObject> out) throws Exception {
                try {
                    JSONObject jsonObject = JSONObject.parseObject(value);

                    // A record with no last_page_id is the first page of a session.
                    String lastPageId = jsonObject.getJSONObject("page").getString("last_page_id");
                    if (lastPageId == null) {
                        out.collect(jsonObject);
                    }
                } catch (Exception e) {
                    // Best-effort: malformed JSON is logged and dropped rather than
                    // failing the job. NOTE(review): consider routing to a side output
                    // or a proper logger instead of stdout.
                    System.out.println("脏数据:" + value);
                }
            }
        });

        // 4. Key the stream by device id (mid) so per-device state can be kept.
        KeyedStream<JSONObject, String> keyedDS = loginDS.keyBy(json -> json.getJSONObject("common").getString("mid"));

        // 5. Deduplicate per device per day using keyed state:
        //    - the state stores the date (yyyy-MM-dd) of the device's last emitted visit;
        //    - state is null            -> first visit ever   -> emit;
        //    - stored date != today     -> first visit today  -> emit and update state;
        //    - stored date == today     -> duplicate          -> drop.
        //    A 1-day TTL garbage-collects state for inactive devices.
        SingleOutputStreamOperator<JSONObject> firstLoginDS = keyedDS.process(new ProcessFunction<JSONObject, JSONObject>() {
            // Last emitted visit date for the current key (device).
            ValueState<String> lastDtState;

            @Override
            public void open(Configuration parameters) throws Exception {

                ValueStateDescriptor<String> loginDesc = new ValueStateDescriptor<>("login", String.class);

                // TTL measured from the last WRITE only (OnCreateAndWrite): same-day
                // repeat visits read the state but must not extend its lifetime,
                // otherwise a chatty device's stale date could linger past one day.
                // (Previously OnReadAndWrite, which contradicted the intended design.)
                StateTtlConfig config = new StateTtlConfig.Builder(Time.days(1L))
                        .setUpdateType(StateTtlConfig.UpdateType.OnCreateAndWrite)
                        .build();
                loginDesc.enableTimeToLive(config);

                lastDtState = getRuntimeContext().getState(loginDesc);
            }

            @Override
            public void processElement(JSONObject value, Context ctx, Collector<JSONObject> out) throws Exception {
                String lastDt = lastDtState.value();
                Long ts = value.getLong("ts");
                String curDt = DateFormatUtil.toDate(ts);

                // Emit only the first record of the day for this device.
                if (lastDt == null || !lastDt.equals(curDt)) {
                    lastDtState.update(curDt);
                    out.collect(value);
                }
            }
        });

        // 6. Write the unique-visitor records to the output topic.
        firstLoginDS.print("==============");  // debug sink; remove for production
        String topicLogin = "dwd_traffic_unique_visitor_detail";
        firstLoginDS.map(JSONAware::toJSONString)
                .addSink(MyKafkaUtil.getFlinkKafkaProducer(topicLogin));

        // 7. Submit the job.
        env.execute();
    }

}
