package com.zyh.rcm.pvuv;

import com.google.common.hash.BloomFilter;
import com.google.common.hash.Funnels;
import com.zyh.rcm.entity.UniqueVisitorResult;
import com.zyh.rcm.entity.UserBehaviorEvent;
import org.apache.flink.api.common.state.ReducingState;
import org.apache.flink.api.common.state.ReducingStateDescriptor;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.TimeCharacteristic;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.windowing.ProcessAllWindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.evictors.TimeEvictor;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.triggers.ContinuousEventTimeTrigger;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.streaming.connectors.kafka.internals.KafkaTopicPartition;
import org.apache.flink.util.Collector;
import org.apache.kafka.clients.consumer.ConsumerConfig;

import java.util.HashMap;
import java.util.Map;
import java.util.Properties;

public class UVJob2 {

    /**
     * Flink streaming job that computes the daily unique-visitor (UV) count from a
     * Kafka topic of {@code UserBehaviorEvent}s.
     *
     * <p>Pipeline: Kafka source (event-time watermarks) → 1-day tumbling event-time
     * window (shifted -8h for a UTC+8 local day) → a {@link ContinuousEventTimeTrigger}
     * fires an updated result every 10 seconds of event time → a per-window Guava
     * {@link BloomFilter} approximately deduplicates user ids while a
     * {@link ReducingState} accumulates the running UV count. A zero-time
     * {@link TimeEvictor} discards already-processed elements after each firing so
     * only the compact window state is carried forward.
     *
     * @param args unused
     * @throws Exception if the Flink job fails to start or execute
     */
    public static void main(String[] args) throws Exception {

        String kafkaTopic = "topic-uv-pv";
        // Custom deserialization schema for UserBehaviorEvent records.
        UserBehaviorEventDeserialization deserializationSchema = new UserBehaviorEventDeserialization();

        Properties kafkaProps = new Properties();
        // Kafka broker address (host:port).
        kafkaProps.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "hadoop10:9092");
        // Consumer group id.
        kafkaProps.setProperty(ConsumerConfig.GROUP_ID_CONFIG, "flink");
        // Build the Kafka consumer.
        FlinkKafkaConsumer<UserBehaviorEvent> kafka = new FlinkKafkaConsumer<>(kafkaTopic, deserializationSchema, kafkaProps);
//        kafka.setStartFromEarliest();
        // Pin the starting offset: partition 0 of the topic, from offset 0.
        // Reuse kafkaTopic instead of repeating the literal so the topic name
        // is defined in exactly one place.
        Map<KafkaTopicPartition, Long> offsets = new HashMap<>();
        offsets.put(new KafkaTopicPartition(kafkaTopic, 0), 0L);
        kafka.setStartFromSpecificOffsets(offsets);
        kafka.setCommitOffsetsOnCheckpoints(false);

        // Assign event-time timestamps and watermarks (2000 ms bound — presumably
        // max out-of-orderness; confirm against UserBehaviorEventWaterMarkerAssigner).
        kafka.assignTimestampsAndWatermarks(new UserBehaviorEventWaterMarkerAssigner(2000));

        StreamExecutionEnvironment environment = StreamExecutionEnvironment.getExecutionEnvironment();
        // Use event-time semantics.
        environment.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);
        environment.getConfig().setAutoWatermarkInterval(1);

        SingleOutputStreamOperator<UserBehaviorEvent> userBehaviorEventDataStream = environment.addSource(kafka);
//        .filter(userBehavior -> "click".equals(userBehavior.getAction()));

        SingleOutputStreamOperator<UniqueVisitorResult> result = userBehaviorEventDataStream
                // One-day tumbling windows, offset -8h to align with local (UTC+8) days.
                .windowAll(TumblingEventTimeWindows.of(Time.days(1), Time.hours(-8)))
                // Emit an intermediate result every 10 seconds of event time.
                .trigger(ContinuousEventTimeTrigger.of(Time.seconds(10)))
                // Evict all already-processed elements after each firing; the window
                // state (bloom filter + running count) carries the aggregate forward.
                .evictor(TimeEvictor.of(Time.seconds(0), true))
                .process(new ProcessAllWindowFunction<UserBehaviorEvent, UniqueVisitorResult, TimeWindow>() {
                    // Per-window bloom filter used to (approximately) deduplicate user ids.
                    private ValueStateDescriptor<BloomFilter<Integer>> bloomFilterDescriptor;
                    // Per-window running UV counter, reduced by integer addition.
                    private ReducingStateDescriptor<Integer> uvStateDescriptor;

                    @Override
                    public void open(Configuration parameters) throws Exception {
                        System.out.println("UVJob2.open");
                        this.bloomFilterDescriptor = new ValueStateDescriptor<BloomFilter<Integer>>("bloomFilter", TypeInformation.of((Class<BloomFilter<Integer>>)(Class<?>)BloomFilter.class));
                        this.uvStateDescriptor = new ReducingStateDescriptor<Integer>("uvState", (v1, v2) -> v1 + v2, Types.INT);
                    }

                    /**
                     * Processes the elements buffered since the last firing: counts each
                     * user id not yet seen in this window, then emits the window's
                     * cumulative UV count.
                     */
                    @Override
                    public void process(Context context, Iterable<UserBehaviorEvent> elements, Collector<UniqueVisitorResult> out) throws Exception {

                        ValueState<BloomFilter<Integer>> bloomFilterValueState = context.windowState().getState(this.bloomFilterDescriptor);
                        BloomFilter<Integer> filter = bloomFilterValueState.value();
                        if (filter == null) {
                            // Sized for ~10k distinct users at a 0.01% false-positive rate.
                            filter = BloomFilter.create(Funnels.integerFunnel(), 10000, 0.0001);
                        }

                        ReducingState<Integer> uvState = context.windowState().getReducingState(uvStateDescriptor);
                        for (UserBehaviorEvent element : elements) {
                            if (!filter.mightContain(element.getUserId())) {
                                uvState.add(1);
                            }
                            // BUG FIX: insert the *user* id (the original inserted the item
                            // id), so the membership test above and this insertion agree.
                            // With item ids in the filter, almost every event was counted
                            // as a new visitor, turning the UV count into a PV count.
                            filter.put(element.getUserId());
                        }
                        bloomFilterValueState.update(filter);

                        TimeWindow window = context.window();
                        long start = window.getStart();
                        long end = window.getEnd();

                        // ReducingState.get() returns null until the first add(); guard
                        // against a firing in which no new unique visitor was recorded,
                        // which would otherwise NPE on unboxing.
                        Integer uv = uvState.get();
                        int count = (uv == null) ? 0 : uv;
                        out.collect(new UniqueVisitorResult(start, end, count));
                    }
                });

        result.print().setParallelism(1);

        environment.execute("pv");
    }

}