package com.need5

import com.typesafe.config.{Config, ConfigFactory}
import org.apache.commons.lang3.StringUtils
import org.apache.spark.broadcast.Broadcast
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, SQLContext}

/**
  * Created by zhuang on 2018/3/5.
  */
/**
  * Spark driver that aggregates per-user tag data:
  * reads a Parquet event file, maps each row to (userId, fieldValues) via
  * `DealDataSec.getField`, merges every user's lists, counts distinct values
  * and prints "value:count,..." per user.
  *
  * Uses an explicit `main` instead of the `App` trait: `App`'s delayed
  * initialization turns every field into lazily-run statements, which
  * interacts badly with Spark closure serialization and makes failures
  * harder to diagnose.
  */
object DealUserTag {

  def main(args: Array[String]): Unit = {
    val load: Config = ConfigFactory.load()
    val conf = new SparkConf().setMaster("local[*]").setAppName(this.getClass.getSimpleName)
      .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
    val sc = new SparkContext(conf)
    // Guarantee the context is stopped even if the job throws.
    try {
      // Read the keyword stop-word list.
      // NOTE(review): hard-coded local Windows path — consider moving it into the
      // config file alongside AppNameById / DataForParquet.
      val stopWordFile = sc.textFile("file:///e:/stopwords.txt")
      // textFile already yields one record per line, so no extra "\n" split is needed.
      val stopWords: Array[String] = stopWordFile.collect()
      // Broadcast the stop words so executors share one read-only copy.
      val bcStopWords: Broadcast[Array[String]] = sc.broadcast(stopWords)
      // SQLContext is needed to read the Parquet input.
      val sqlContext: SQLContext = new SQLContext(sc)
      // Read the app-id -> app-name mapping file, parse and broadcast it.
      val appFile: RDD[String] = sc.textFile(load.getString("AppNameById"))
      val bcAppNames: Broadcast[Map[String, String]] = sc.broadcast(DealDataSec.id2Name(appFile))
      // Read the raw event data.
      val parquet: DataFrame = sqlContext.read.parquet(load.getString("DataForParquet"))
      // Extract a (userId, fieldValues) pair from every row.
      val pairs = parquet.map(row => DealDataSec.getField(row, bcAppNames, bcStopWords))
      // Drop records that carry no user id. (No cache: consumed exactly once below.)
      val withUser = pairs.filter(pair => StringUtils.isNotEmpty(pair._1))
      // Merge each user's lists position-wise: ("a","b") -> "a-b".
      // NOTE(review): `zip` silently truncates to the shorter list, and "-" doubles
      // as the join separator used when splitting below — values that themselves
      // contain "-" will be split apart. Confirm this is acceptable for the data.
      val merged: RDD[(String, List[String])] = withUser.reduceByKey { (list1, list2) =>
        list1.zip(list2).map { case (a, b) => a + "-" + b }
      }
      // Per user: split the merged strings back apart and count each distinct value.
      val counts: RDD[(String, Map[String, Int])] =
        merged.mapValues(_.flatMap(_.split("-")).groupBy(identity).map { case (value, occurrences) =>
          (value, occurrences.size)
        })
      // Render each user's counts as "value:count,...", sorted for stable output.
      val rendered: RDD[(String, String)] =
        counts.mapValues(_.map { case (value, count) => value + ":" + count }.toList.sorted.mkString(","))
      // NOTE(review): in cluster mode this println runs on executors; fine for local[*].
      rendered.foreach(println)
    } finally {
      sc.stop()
    }
  }
}
