package com.bkd.tools

import com.bkd.util.JedisPool
import org.apache.spark.{SparkConf, SparkContext}
import redis.clients.jedis.Jedis

object RedisUtil {

  /**
   * Loads a data-dictionary file and writes each `key:value` entry into Redis.
   *
   * Expects exactly one program argument: the input path of the dictionary file.
   * Each line must have the form `key:value`; only the FIRST colon is treated as
   * the separator, so values containing further colons are preserved. Lines with
   * no colon are skipped instead of crashing the job.
   */
  def main(args: Array[String]): Unit = {
    // 0. Validate argument count; print usage and exit with a failure code if wrong.
    if (args.length != 1) {
      println(
        """
          |com.bkd.tools.RedisUtil
          |参数
          |appdictInputPath
        """.stripMargin)
      sys.exit(1) // non-zero: this is a usage error, not success
    }

    // 1. Accept the program argument: path of the dictionary input file.
    val Array(appdictInputPath) = args

    // 2. Create SparkConf -> SparkContext.
    val conf = new SparkConf()
    conf.setAppName(this.getClass.getSimpleName)
    conf.setMaster("local[*]")
    // Kryo serialization for RDD data moved between workers / spilled to disk.
    conf.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")

    val sc = new SparkContext(conf)

    try {
      sc.textFile(appdictInputPath)
        // Split on the first colon only (limit = 2) so values with ':' survive;
        // flatMap drops malformed lines rather than throwing on a missing field.
        .flatMap { line =>
          line.split(":", 2) match {
            case Array(key, value) => Some((key, value))
            case _                 => None
          }
        }
        .foreachPartition { itr =>
          // One Jedis connection per partition; always return it to the pool,
          // even if a set() call throws mid-partition.
          val jedis: Jedis = JedisPool.getJedis()
          try {
            itr.foreach { case (key, value) => jedis.set(key, value) }
          } finally {
            jedis.close()
          }
        }
    } finally {
      // Ensure the SparkContext is stopped even when the job fails.
      sc.stop()
    }
  }
}
