package com.dmp.total
import scala.collection.mutable.Map
import com.dmp.config.{ConfigHandler, MCode}
import com.dmp.util.JPool
import org.apache.log4j.{Level, Logger}
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.{DataFrame, SQLContext}
import redis.clients.jedis.Jedis

/**
 * Batch job: loads a tab-separated app dictionary file and writes each
 * (appId -> appName) pair into Redis via `jedis.set`.
 *
 * Expected input format: TSV rows with at least 5 columns, where
 * column index 1 is the app name and column index 4 is the app id
 * (as demonstrated by the filter/map below).
 */
object MediaIdAndName {
  def main(args: Array[String]): Unit = {
    // Quiet Spark's internal logging; job-level output stays visible.
    Logger.getLogger("org").setLevel(Level.WARN)

    // Input path may be overridden by the first CLI argument; defaults to the
    // original hard-coded location so existing invocations keep working.
    val inputPath: String =
      if (args.nonEmpty) args(0)
      else "C:\\Users\\92326\\Desktop\\Violet\\data\\app_dict.txt"

    val sparkConf = new SparkConf()
    sparkConf.setAppName("分解媒体")
    sparkConf.setMaster("local[*]")
    sparkConf.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")

    val sc = new SparkContext(sparkConf)
    try {
      val dataRdd: RDD[String] = sc.textFile(inputPath)
      dataRdd
        // Drop malformed rows that lack the id/name columns used below.
        .filter(_.split("\t").length >= 5)
        .map { line =>
          val fields = line.split("\t")
          (fields(4), fields(1)) // (appId, appName)
        }
        .foreachPartition { pairs =>
          // One Redis connection per partition; must be released even if a
          // write throws, otherwise the pool leaks connections.
          val jedis = JPool.getJedis
          try {
            pairs.foreach { case (appId, appName) =>
              jedis.set(appId, appName)
            }
          } finally {
            jedis.close()
          }
        }
    } finally {
      // Always release Spark driver resources, even on failure.
      sc.stop()
    }
  }
}
