package com.atguigu.edu.realtime.app.dwd.log;

import com.alibaba.fastjson.JSONObject;
import com.atguigu.edu.realtime.common.kafkaTopics;
import com.atguigu.edu.realtime.util.DateFormatUtil;
import com.atguigu.edu.realtime.util.KafkaUtil;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.RichFilterFunction;
import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.util.Collector;

/**
 * @author Lec
 * @date 2022/9/3 18:15
 */

/**
 * DWD traffic unique-visitor (UV) detail job.
 *
 * <p>Reads page-view logs from the {@code DWD_TRAFFIC_PAGE_LOG} Kafka topic,
 * keeps only session-entry pages ({@code last_page_id == null}), de-duplicates
 * per device ({@code mid}) per calendar day using keyed ValueState, and writes
 * the surviving records to the {@code dwd_traffic_unique_visitor_detail} topic.
 */
public class DwdTrafficUniqueVisitorDetail {
    public static void main(String[] args) throws Exception {
        // TODO 1. Create the stream execution environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
//        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);
        // TODO 2. Checkpointing / state backend (left disabled for local development)
        /*
        env.enableCheckpointing(5 * 60 * 1000L, CheckpointingMode.EXACTLY_ONCE );
        env.getCheckpointConfig().setCheckpointTimeout( 3 * 60 * 1000L );
        env.getCheckpointConfig().setMaxConcurrentCheckpoints(2);
        env.setStateBackend(new HashMapStateBackend());
        env.getCheckpointConfig().setCheckpointStorage("hdfs://hadoop102:8020/gmall/ck");
        System.setProperty("HADOOP_USER_NAME", "atguigu");
        */
        String groupId = "dwd_traffic_unique_visitor_detail";

        // TODO 3. Read the page-log topic from Kafka
        DataStreamSource<String> streamSource = env.addSource(KafkaUtil.getKafkaConsumer(kafkaTopics.DWD_TRAFFIC_PAGE_LOG, groupId));

        // TODO 4. Parse JSON and keep only session-entry pages (last_page_id == null)
        SingleOutputStreamOperator<JSONObject> flatMapStream = streamSource.flatMap(new FlatMapFunction<String, JSONObject>() {
            @Override
            public void flatMap(String value, Collector<JSONObject> out) throws Exception {
                JSONObject jsonObject = JSONObject.parseObject(value);
                JSONObject page = jsonObject.getJSONObject("page");
                // Guard against records with no "page" section so a single
                // malformed message cannot fail the whole job with an NPE.
                if (page != null && page.getString("last_page_id") == null) {
                    out.collect(jsonObject);
                }
            }
        });

        // TODO 5. Key by device id (mid) and keep only the first visit per day
        KeyedStream<JSONObject, String> keyedStream = flatMapStream.keyBy(new KeySelector<JSONObject, String>() {
            @Override
            public String getKey(JSONObject value) throws Exception {
                return value.getJSONObject("common").getString("mid");
            }
        });

        SingleOutputStreamOperator<JSONObject> filterStream = keyedStream.filter(new RichFilterFunction<JSONObject>() {
            // Per-key state: last calendar date on which this device was counted
            private ValueState<String> lastLoginDtState;

            @Override
            public void open(Configuration parameters) throws Exception {
                lastLoginDtState = getRuntimeContext().getState(new ValueStateDescriptor<String>("last_login_dt", String.class));
            }

            @Override
            public boolean filter(JSONObject value) throws Exception {
                String lastLoginDt = lastLoginDtState.value();
                Long ts = value.getLong("ts");
                String loginDate = DateFormatUtil.toDate(ts);
                // Keep the record when no date has been stored yet or the stored
                // date differs from this record's date (first visit of the day).
                // (The original comment claimed "equals today", which contradicted
                // the actual condition.)
                if (lastLoginDt == null || !loginDate.equals(lastLoginDt)) {
                    lastLoginDtState.update(loginDate);
                    return true;
                }
                return false;
            }
        });
//        filterStream.print();

        // TODO 6. Write the de-duplicated stream to the target Kafka topic
        String targetTopic = "dwd_traffic_unique_visitor_detail";

        // BUG FIX: the sink previously consumed flatMapStream, bypassing the
        // de-duplication filter entirely; it must consume filterStream.
        SingleOutputStreamOperator<String> mapStream = filterStream.map(JSONObject::toJSONString);

        mapStream.addSink(KafkaUtil.getKafkaProducer(targetTopic));

        // TODO 7. Submit the job
        env.execute(groupId);
    }
}
