package com.atguigu.gmall.realtime.app.dwd;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.gmall.realtime.util.DateFormatUtil;
import com.atguigu.gmall.realtime.util.MyKafkaUtil;
import org.apache.flink.api.common.functions.RichFilterFunction;
import org.apache.flink.api.common.state.StateTtlConfig;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;

public class DwdTrafficUvApp {

    // Pipeline:
    //   1. Consume kafka topic dwd_traffic_page_log
    //   2. Parse each raw string into a JSONObject
    //   3. Filter: per-device (mid) keyed state holds the user's last visit date so only
    //      the first qualifying page view of each day survives (unique-visitor dedup)
    //   4. Write surviving records to kafka topic dwd_traffic_uv_log

    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(4);

        // 1. Consume kafka dwd_traffic_page_log
        String sourceTopic = "dwd_traffic_page_log";
        String groupId = "dwd_traffic_uv_app";
        String sinkTopic = "dwd_traffic_uv_log";

        FlinkKafkaConsumer<String> kafkaConsumer = MyKafkaUtil.getKafkaConsumer(sourceTopic, groupId);
        DataStreamSource<String> kafkaStream = env.addSource(kafkaConsumer);

        // 2. Parse raw strings into JSON objects
        SingleOutputStreamOperator<JSONObject> jsonObjDstream = kafkaStream.map(JSON::parseObject);

        // 3. Key by device id (mid) so each device gets its own dedup state
        KeyedStream<JSONObject, String> midKeyedDstream =
                jsonObjDstream.keyBy(jsonObj -> jsonObj.getJSONObject("common").getString("mid"));

        SingleOutputStreamOperator<JSONObject> uvStream = midKeyedDstream.filter(new RichFilterFunction<JSONObject>() {

            // Last visit date (formatted by DateFormatUtil.toDate) for the current key.
            // TTL-bounded to one day so idle devices do not accumulate state forever.
            private ValueState<String> lastVisitValueState;

            @Override
            public void open(Configuration parameters) throws Exception {
                // Build the descriptor inside open() so the TTL config is applied once per
                // task instance instead of riding along in the serialized function closure.
                ValueStateDescriptor<String> lastVisitValueStateDesc =
                        new ValueStateDescriptor<>("last_visit", String.class);
                StateTtlConfig stateTtlConfig = StateTtlConfig.newBuilder(Time.days(1))
                        .setUpdateType(StateTtlConfig.UpdateType.OnCreateAndWrite)
                        .build();
                lastVisitValueStateDesc.enableTimeToLive(stateTtlConfig);
                lastVisitValueState = getRuntimeContext().getState(lastVisitValueStateDesc);
            }

            @Override
            public boolean filter(JSONObject jsonObject) throws Exception {
                // Strict UV definition: only the first page of a session may count.
                // A session start is identified by a missing/empty last_page_id;
                // any in-session page turn is dropped immediately.
                String lastPageId = jsonObject.getJSONObject("page").getString("last_page_id");
                if (lastPageId != null && !lastPageId.isEmpty()) {
                    return false;
                }

                Long ts = jsonObject.getLong("ts");
                String curDate = DateFormatUtil.toDate(ts);

                // Read the state once per record instead of three times.
                // A null or empty stored date both mean "not seen today", and neither
                // can equal curDate, so a single equals check covers every branch of
                // the original nested conditional.
                String lastVisitDate = lastVisitValueState.value();
                if (curDate.equals(lastVisitDate)) {
                    // Already counted today — filter out.
                    return false;
                }

                // First qualifying visit today (never seen, expired, or seen on an
                // earlier day): record today's date and let the event through.
                lastVisitValueState.update(curDate);
                return true;
            }
        });

        // uvStream.print("uv:::::");

        // 4. Serialize back to JSON strings and publish to the sink topic
        SingleOutputStreamOperator<String> sinkStream = uvStream.map(jsonObj -> JSON.toJSONString(jsonObj));
        sinkStream.addSink(MyKafkaUtil.getKafkaProducer(sinkTopic));

        env.execute("dwd_traffic_uv_app");
    }
}
