package analysis_uv;

import net.agkn.hll.HLL;
import org.apache.flink.api.common.functions.AggregateFunction;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.shaded.guava18.com.google.common.hash.BloomFilter;
import org.apache.flink.shaded.guava18.com.google.common.hash.Funnels;
import org.apache.flink.shaded.guava18.com.google.common.hash.Hashing;
import org.apache.flink.streaming.api.functions.KeyedProcessFunction;
import org.apache.flink.util.Collector;
import org.roaringbitmap.longlong.Roaring64NavigableMap;
import redis.clients.jedis.Jedis;

import java.nio.charset.StandardCharsets;

/**
 * @author zkq
 * @date 2022/10/5 20:27
 */
public class test {
    // De-duplication using a Bloom filter combined with keyed state
    public class BloomFilterDistinct extends KeyedProcessFunction<Long, String, Long> {
        //import org.apache.flink.shaded.guava18.com.google.common.hash.BloomFilter;
        private transient ValueState<BloomFilter> bloomState;
        private transient ValueState<Long> countState;

        /**
         * Registers the keyed state handles. Without this, {@code bloomState} and
         * {@code countState} are never initialized and the first call to
         * {@link #processElement} throws a NullPointerException.
         *
         * @param parameters Flink configuration (unused)
         */
        @Override
        public void open(Configuration parameters) throws Exception {
            bloomState = getRuntimeContext().getState(
                    new ValueStateDescriptor<>("bloom-state", BloomFilter.class));
            countState = getRuntimeContext().getState(
                    new ValueStateDescriptor<>("count-state", Long.class));
        }

        /**
         * De-duplicates incoming values with a per-key Bloom filter and emits the
         * running distinct count. The count may slightly undercount because of
         * Bloom-filter false positives.
         *
         * @param value   the element to de-duplicate
         * @param context processing context (unused)
         * @param out     collector receiving the updated distinct count
         * @throws Exception if state access fails
         */
        @Override
        public void processElement(String value, Context context, Collector<Long> out) throws Exception {

            // NOTE(review): the state field is declared with the raw BloomFilter type;
            // consider parameterizing it as ValueState<BloomFilter<String>>.
            BloomFilter bloomFilter = bloomState.value();
            Long skuCount = countState.value();

            if (bloomFilter == null) {
                // First element for this key: lazily create the filter.
                // 10M expected insertions with Guava's default 3% false-positive rate.
                bloomFilter = BloomFilter.create(
                        Funnels.stringFunnel(StandardCharsets.UTF_8), 10_000_000);
            }
            if (skuCount == null) {
                skuCount = 0L;
            }
            if (!bloomFilter.mightContain(value)) {
                bloomFilter.put(value);
                skuCount = skuCount + 1;
            }
            bloomState.update(bloomFilter);
            countState.update(skuCount);
            out.collect(skuCount);
        }


        // Exact de-duplication based on a compressed bitmap (requires the RoaringBitmap dependency)
        /**
         * Exact distinct-count aggregate backed by a compressed 64-bit bitmap
         * (RoaringBitmap). Unlike the Bloom-filter and HLL variants, this gives an
         * exact cardinality, at the cost of memory proportional to the value set.
         */
        public class BitMapDistinct implements AggregateFunction<Long, Roaring64NavigableMap,Long> {

            @Override
            public Roaring64NavigableMap createAccumulator() {
                return new Roaring64NavigableMap();
            }

            @Override
            public Roaring64NavigableMap add(Long value, Roaring64NavigableMap accumulator) {
                accumulator.add(value);
                return accumulator;
            }

            @Override
            public Long getResult(Roaring64NavigableMap accumulator) {
                // Exact number of distinct values recorded in the bitmap.
                return accumulator.getLongCardinality();
            }

            @Override
            public Roaring64NavigableMap merge(Roaring64NavigableMap a, Roaring64NavigableMap b) {
                // BUG FIX: previously returned null, which would NPE whenever Flink
                // merges accumulators (e.g. session windows). Union b into a instead.
                a.or(b);
                return a;
            }
        }
    }
    // De-duplication based on HyperLogLog
        /**
         * Approximate distinct-count aggregate backed by a HyperLogLog sketch
         * (net.agkn.hll). Trades a small relative error (~1% at log2m=14) for
         * constant memory, regardless of how many distinct ids are seen.
         */
        public class HyperLogLogDistinct implements AggregateFunction<Tuple2<String, Long>, HLL, Long> {
            @Override
            public HLL createAccumulator() {
                // log2m = 14 (2^14 registers), 5 bits per register.
                return new HLL(14, 5);
            }

            @Override
            public HLL add(Tuple2<String, Long> value, HLL accumulator) {
                // value is an access record <product, userId>.
                // BUG FIX: HLL.addRaw expects the output of a good 64-bit hash
                // function; feeding raw (e.g. sequential) ids skews the estimate
                // badly, so hash the id with murmur3 first.
                final long hashed = Hashing.murmur3_128().hashLong(value.f1).asLong();
                accumulator.addRaw(hashed);
                return accumulator;
            }

            @Override
            public Long getResult(HLL accumulator) {
                // Estimated number of distinct ids inserted into the sketch.
                return accumulator.cardinality();
            }

            @Override
            public HLL merge(HLL a, HLL b) {
                // union() folds b into a in place.
                a.union(b);
                return a;
            }
        }

    /**
     * Demonstrates Redis' HyperLogLog commands: inserts 10,000 distinct members
     * into the "abin" key via PFADD and prints the approximate cardinality.
     *
     * <p>PFCOUNT is an estimate — e.g. it may print 10055 instead of 10000
     * (~0.5% error).
     *
     * @param args unused
     */
    public static void main(String[] args) {
        // try-with-resources ensures the connection is closed even if a call throws
        // (the original leaked the connection on any exception before close()).
        try (Jedis jedis = new Jedis("hadoop102", 6379)) {
            for (int i = 0; i < 10000; i++) {
                jedis.pfadd("abin", "zk" + i);
            }
            long abinCount = jedis.pfcount("abin");
            System.out.println(abinCount);
        }
    }

}
