package com.doit.day03;

import com.alibaba.fastjson.JSON;
import com.google.common.hash.BloomFilter;
import com.google.common.hash.Funnels;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.roaringbitmap.RoaringBitmap;

import java.time.Duration;
import java.util.Arrays;
import java.util.Properties;
import java.util.Timer;
import java.util.TimerTask;

public class BloomFilterIsNew {

    /**
     * Entry point: starts a background consumer thread that reads event-log
     * records from Kafka and tags each event as new (isNew=1) or returning
     * (isNew=0), using a RoaringBitmap to remember guids seen so far.
     *
     * <p>State is purely in-memory, so the new/old flags reset on restart —
     * TODO confirm whether that is acceptable or whether the bitmap should
     * be persisted (e.g. to Redis).
     */
    public static void main(String[] args) {
        // Bitmap of guids that have already been observed. Exact membership
        // (no false positives), unlike the BloomFilter alternative.
        RoaringBitmap bitmap = RoaringBitmap.bitmapOf();
        new Thread(new BloomFilterConsumerTask(bitmap)).start();
    }
}

/**
 * Kafka consumer task that marks each incoming {@code EventLog} as a new or
 * returning user by checking its guid against a deduplication structure.
 *
 * <p>Exactly one of the two dedup structures is injected via the constructors:
 * either a {@link RoaringBitmap} (exact membership) or a Guava
 * {@link BloomFilter} (probabilistic; may report false positives, i.e. a truly
 * new user can occasionally be tagged as returning). The original code
 * dereferenced BOTH fields unconditionally, so whichever constructor was used
 * the other field was {@code null} and the first record threw a
 * NullPointerException; {@code run()} now consults only the structure that was
 * actually provided.
 */
class BloomFilterConsumerTask implements Runnable{
    // Exactly one of these is non-null, depending on which constructor ran.
    private BloomFilter<Long> longBloomFilter;
    private RoaringBitmap bitmap;

    /** Use exact bitmap-based deduplication. */
    public BloomFilterConsumerTask(RoaringBitmap bitmap){
        this.bitmap = bitmap;
    }


    /** Use probabilistic Bloom-filter-based deduplication. */
    public BloomFilterConsumerTask(BloomFilter<Long> longBloomFilter){
        this.longBloomFilter = longBloomFilter;
    }

    @Override
    public void run() {
        Properties props = new Properties();
        props.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG,"linux01:9092,linux02:9092,linux03:9092");
        props.setProperty(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        props.setProperty(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        props.setProperty(ConsumerConfig.GROUP_ID_CONFIG,"abcd");
        props.setProperty("allow.auto.create.topics","true");
        // Start from the beginning of the topic when no committed offset exists.
        props.setProperty("auto.offset.reset","earliest");
        // Auto-commit every 3s: at-least-once delivery, so a crash can replay
        // a few records (they would then be re-tagged as returning users).
        props.setProperty("enable.auto.commit","true");
        props.setProperty("auto.commit.interval.ms","3000");

        // Create the Kafka consumer and subscribe to the event-log topic.
        KafkaConsumer<String, String> consumer = new KafkaConsumer<String, String>(props);
        consumer.subscribe(Arrays.asList("event-log"));

        while (true){
            ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(Integer.MAX_VALUE));
            for (ConsumerRecord<String, String> record : records) {
                // Each record value is a JSON-serialized EventLog.
                String json = record.value();
                EventLog eventLog = JSON.parseObject(json, EventLog.class);

                long guid = eventLog.getGuid();
                if (hasSeen(guid)) {
                    // Guid already known: returning user.
                    eventLog.setIsNew(0);
                }else {
                    // First time we see this guid: tag as new user and
                    // remember it so the next occurrence counts as returning.
                    eventLog.setIsNew(1);
                    remember(guid);
                }
                System.out.println(eventLog);
            }
        }
    }

    /** Returns true if this guid has been observed before. */
    private boolean hasSeen(long guid) {
        if (bitmap != null) {
            // NOTE(review): RoaringBitmap only stores ints, so the long guid
            // is truncated here — guids outside the int range can collide.
            // Confirm guids fit in 32 bits, or switch to Roaring64NavigableMap.
            return bitmap.contains((int) guid);
        }
        return longBloomFilter.mightContain(guid);
    }

    /** Records this guid in whichever dedup structure was injected. */
    private void remember(long guid) {
        if (bitmap != null) {
            bitmap.add((int) guid);
        } else {
            longBloomFilter.put(guid);
        }
    }
}
