package etl

import Configer.Config
import org.apache.spark.{SparkConf, SparkContext}
import util.JedisPools

// Clean up the dictionary data and load it into Redis.
object DictToRedis {

  /** Entry point: reads the app-dictionary file, keeps rows with at least
    * five tab-separated fields, and writes `field(4) -> field(1)` entries
    * into the Redis hash `"appdict"` (database 15).
    *
    * @param args optional; `args(0)` overrides the input file path
    *             (defaults to the original hard-coded local path).
    */
  def main(args: Array[String]): Unit = {
    // Spark context configured for a local run.
    val conf = new SparkConf()
      .setAppName(s"${this.getClass.getName}")
      .setMaster("local[*]")
      .set("spark.serializer", Config.serializer)

    val sc = new SparkContext(conf)
    try {
      // NOTE(review): default is a developer-machine path; prefer passing
      // the location via args(0) in any real deployment.
      val inputPath =
        if (args.nonEmpty) args(0)
        else "C:\\Users\\44323\\Desktop\\资料PDF\\app_dict.txt"

      // Split on every tab (-1 keeps trailing empty fields) and drop rows
      // too short to provide fields 1 and 4.
      val rows = sc.textFile(inputPath)
        .map(_.split("\t", -1))
        .filter(_.length >= 5)

      rows.foreachPartition { partition =>
        // One Jedis connection per partition; close it even if hset throws
        // so the pooled connection is not leaked.
        val jedis = JedisPools.getJedis(15)
        try {
          partition.foreach(row => jedis.hset("appdict", row(4), row(1)))
        } finally {
          jedis.close()
        }
      }
    } finally {
      // Always release the SparkContext, even when the job fails.
      sc.stop()
    }
  }
}
