package com.atguigu.app.dwd.log;

import com.alibaba.fastjson.JSONObject;
import com.atguigu.util.DateFormatUtil;
import com.atguigu.util.KafkaUtil;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.state.StateTtlConfig;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.KeyedProcessFunction;
import org.apache.flink.util.Collector;

/**
 * Flink streaming job: reads page-log records from Kafka, tags every record with a
 * page_count of 1, and marks the first visit per device (mid) per day with isUnique=1
 * (0 otherwise), using keyed ValueState with a 1-day TTL. Results are written back to Kafka.
 */
public class DwdPageAndUniqueCount {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Parallelism 1 for local/dev runs; raise for production throughput.
        env.setParallelism(1);

        // TODO 2: checkpointing / state backend configuration (disabled for local runs).
        /*
        env.enableCheckpointing(5 * 60 * 1000L, CheckpointingMode.EXACTLY_ONCE);
        env.getCheckpointConfig().setCheckpointTimeout(3 * 60 * 1000L);
        env.getCheckpointConfig().setMaxConcurrentCheckpoints(2);
        env.setStateBackend(new HashMapStateBackend());
        env.getCheckpointConfig().setCheckpointStorage("hdfs://hadoop102:8020/gmall/ck");
        System.setProperty("HADOOP_USER_NAME", "atguigu");
        */

        String topicId = "dwd_traffic_page_log";
        String groupId = "dwd_page_and_unique_count";
        DataStreamSource<String> kafkaSource = env.addSource(KafkaUtil.getFlinkKafkaConsumer(topicId, groupId));

        // Parse each raw record into JSON and tag it with page_count = 1.
        // BUG FIX: the original null-branch called jsonObject.put(...) on the null
        // reference (guaranteed NPE) and then collected null downstream; a malformed
        // record would also throw out of parseObject and fail the whole job.
        // Dirty / unparsable records are now dropped instead.
        SingleOutputStreamOperator<JSONObject> jsonObjectStream = kafkaSource.flatMap(new FlatMapFunction<String, JSONObject>() {
            @Override
            public void flatMap(String value, Collector<JSONObject> out) throws Exception {
                JSONObject jsonObject;
                try {
                    jsonObject = JSONObject.parseObject(value);
                } catch (Exception e) {
                    // Malformed JSON: skip this record rather than killing the job.
                    return;
                }
                if (jsonObject != null) {
                    jsonObject.put("page_count", 1L);
                    out.collect(jsonObject);
                }
            }
        });

        // Key by device id (common.mid) and flag the first record of each calendar day.
        SingleOutputStreamOperator<JSONObject> processStream = jsonObjectStream.keyBy(new KeySelector<JSONObject, String>() {
            @Override
            public String getKey(JSONObject value) throws Exception {
                return value.getJSONObject("common").getString("mid");
            }
        }).process(new KeyedProcessFunction<String, JSONObject, JSONObject>() {
            // Holds the last date (yyyy-MM-dd) this device was seen; TTL of 1 day
            // lets Flink expire stale entries so state doesn't grow forever.
            private ValueState<String> lastDateState = null;

            @Override
            public void open(Configuration parameters) throws Exception {
                // Renamed from the original, where the descriptor local shadowed the state field.
                ValueStateDescriptor<String> descriptor = new ValueStateDescriptor<>("isUnique", String.class);
                descriptor.enableTimeToLive(
                        StateTtlConfig.newBuilder(Time.days(1L))
                                .setUpdateType(StateTtlConfig.UpdateType.OnCreateAndWrite)
                                .build());
                lastDateState = getRuntimeContext().getState(descriptor);
            }

            @Override
            public void processElement(JSONObject value, Context ctx, Collector<JSONObject> out) throws Exception {
                String lastDate = lastDateState.value();
                // NOTE(review): assumes "ts" is always present and in seconds — a missing
                // ts will NPE on unboxing; confirm upstream guarantees this field.
                Long ts = value.getLong("ts") * 1000L;
                String date = DateFormatUtil.toDate(ts);
                if (lastDate == null || !lastDate.equals(date)) {
                    // First sighting of this device today -> unique visitor.
                    lastDateState.update(date);
                    value.put("isUnique", 1L);
                } else {
                    value.put("isUnique", 0L);
                }
                out.collect(value);
            }
        });

        // Serialize back to JSON strings for the Kafka sink.
        SingleOutputStreamOperator<String> resultStream = processStream.map(new MapFunction<JSONObject, String>() {
            @Override
            public String map(JSONObject value) throws Exception {
                return value.toJSONString();
            }
        });

        resultStream.print("123");
        // NOTE(review): the consumer group id is passed as the producer argument —
        // verify KafkaUtil.getFlinkKafkaProducer expects a topic name; if so, a
        // dedicated sink topic constant is probably intended here.
        resultStream.addSink(KafkaUtil.getFlinkKafkaProducer(groupId));
        env.execute(groupId);
    }
}
