import java.nio.charset.StandardCharsets

import cn.doitedu.commons.utils.SparkUtil
import org.apache.hadoop.util.bloom.{BloomFilter, Key}
import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession

object BloomSparkDemo {

  /**
   * Demonstrates sharing a Hadoop `BloomFilter` across Spark tasks via a broadcast variable.
   *
   * A Bloom filter is built on the driver over a small whitelist ("aaa", "bbb", "ccc"),
   * broadcast once, and then each element of a Dataset is probed for (probabilistic)
   * membership on the executors.
   */
  def main(args: Array[String]): Unit = {
    // Kryo serialization lets the BloomFilter broadcast without Java-serialization issues.
    val spark = SparkSession.builder()
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .master("local")
      .getOrCreate()
    import spark.implicits._

    val ds = spark.createDataset(List("aaa", "bbb", "ccc", "ddd"))

    // False-positive probability: p = (1 - e^(-kn/m))^k
    //   vectorSize m = 1,000,000 bits, nbHash k = 5, hashType 1 = Hash.MURMUR_HASH
    val filter: BloomFilter = new BloomFilter(1000000, 5, 1)

    // Use an explicit charset: the no-arg getBytes() depends on the JVM's platform-default
    // encoding, which may differ between driver and executors and corrupt lookups.
    Seq("aaa", "bbb", "ccc").foreach { s =>
      filter.add(new Key(s.getBytes(StandardCharsets.UTF_8)))
    }

    // Broadcast once so every task reuses the same read-only copy instead of
    // shipping the filter with each closure.
    val bc = spark.sparkContext.broadcast(filter)

    val res = ds.map { s =>
      // membershipTest: false => definitely absent; true => probably present
      bc.value.membershipTest(new Key(s.getBytes(StandardCharsets.UTF_8)))
    }

    res.show(10, false)

    spark.close()
  }

}
