package etl

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}
import utils.JedisPools

// Write dictionary field data into Redis
object DictToRedis {

  /** Reads the app-dictionary text file and writes each row into the Redis
    * hash "appDict" (field = column 4, value = column 1 of the tab-split row;
    * presumably package-name -> app-name — TODO confirm against the data file).
    *
    * Optional command-line arg:
    *   args(0) - input file path; defaults to the original hard-coded path,
    *             so existing invocations keep working unchanged.
    */
  def main(args: Array[String]): Unit = {
    // Allow the input path to be supplied on the command line; fall back to
    // the original hard-coded location for backward compatibility.
    val inputPath =
      if (args.nonEmpty) args(0)
      else "E:\\小牛项目\\DMP广告项目34期\\资料PDF\\app_dict.txt"

    // Spark context (local mode, all cores).
    val conf = new SparkConf()
      .setAppName(s"${this.getClass.getName}")
      .setMaster("local[*]")
    val sc = new SparkContext(conf)

    try {
      // Read the raw dictionary lines.
      val lines = sc.textFile(inputPath)

      // Split on tab keeping trailing empty fields (limit = -1), and keep only
      // rows with at least 5 columns so ar(4) below is always in bounds.
      val rows = lines.map(_.split("\t", -1)).filter(_.length >= 5)

      // Write each partition through one Jedis connection. close() runs in a
      // finally block so a failed hset cannot leak the pooled connection.
      rows.foreachPartition { partition =>
        val jedis = JedisPools.getJedis(4) // Redis DB index 4
        try {
          partition.foreach { ar =>
            jedis.hset("appDict", ar(4), ar(1))
          }
        } finally {
          jedis.close()
        }
      }
    } finally {
      // Release Spark resources even when the job fails.
      sc.stop()
    }
  }
}
