package region_search

import java.io.File

import org.apache.commons.io.FileUtils
import org.apache.spark.sql.SparkSession
import org.lionsoul.ip2region.{DbConfig, DbSearcher}

object SearchTestSpark {

  /**
    * Looks up the geographic region for a few sample IPs with the ip2region
    * dictionary, broadcasting the dictionary bytes so every executor can
    * build an in-memory searcher, then prints each (ip, region) pair.
    */
  def main(args: Array[String]): Unit = {

    val spark = SparkSession.builder()
      .appName("ip2region-search")
      .master("local")
      .getOrCreate()
    import spark.implicits._

    val rdd = spark.createDataset(Seq(
      "202.10.80.30",
      "10.10.8.60",
      "123.123.123.123")).rdd

    val file = new File("initdata/ip2region.db")
    val ba = FileUtils.readFileToByteArray(file)

    // Broadcast the ip2region dictionary bytes to all executors.
    val bc = spark.sparkContext.broadcast(ba)

    val res = rdd.mapPartitions { ips =>
      // Build the searcher ONCE per partition instead of once per record:
      // DbSearcher construction is comparatively expensive.
      val searcher = new DbSearcher(new DbConfig(), bc.value)
      ips.map { ip =>
        val region = searcher.memorySearch(ip).getRegion
        // NOTE: if the region ever needs splitting into its fields, use
        // region.split("\\|") — a bare "|" is a regex alternation and
        // would split the string into individual characters.
        (ip, region)
      }
    }

    res.foreach(println)

    spark.stop()
  }

}
