package com.rz.spark.report

import com.rz.spark.utils.JedisPools
import org.apache.spark.{SparkConf, SparkContext}

/**
  * Loads the app dictionary file and writes it into Redis.
  *
  * Input: a tab-separated text file; for each line, field index 4 is used
  * as the Redis key and field index 1 as the value (assumes >= 5 fields
  * per well-formed line — TODO confirm the dictionary schema).
  *
  * Usage: AppDict2Redis &lt;appdictInputPath&gt;
  */
object AppDict2Redis {
  def main(args: Array[String]): Unit = {
    // Exactly one argument is expected (the original checked for 3 but then
    // destructured a single element — guaranteed MatchError — and printed
    // usage without exiting).
    if (args.length != 1) {
      println(
        """
          |com.rz.spark.report.AppDict2Redis
          |参数：
          | appdictInputPath
        """.stripMargin)
      sys.exit(1)
    }

    val Array(appdictInputPath) = args

    // Build the Spark context.
    val sparkConf = new SparkConf()
    sparkConf.setAppName(this.getClass.getSimpleName)
    // NOTE(review): hard-coded local master overrides `spark-submit --master`;
    // make configurable (or drop) before running on a cluster.
    sparkConf.setMaster("local[*]")
    // Kryo serialization for data shipped between workers.
    sparkConf.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")

    val sc = new SparkContext(sparkConf)

    sc.textFile(appdictInputPath)
      // -1 keeps trailing empty fields so positional indexing stays stable.
      .map(_.split("\t", -1))
      // Guard against short/malformed lines instead of failing the whole job
      // with ArrayIndexOutOfBoundsException; well-formed lines are unchanged.
      .collect { case fields if fields.length >= 5 => (fields(4), fields(1)) }
      .foreachPartition { itr =>
        // One connection per partition; always return it to the pool,
        // even if a write fails mid-iteration.
        val jedis = JedisPools.getJedis
        try {
          itr.foreach { case (key, value) => jedis.set(key, value) }
        } finally {
          jedis.close()
        }
      }

    sc.stop()
  }
}
