package profile.dsplog

import org.apache.commons.lang3.StringUtils
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.SparkSession

import scala.collection.mutable

/**
  * Created by hunter.coder (Tao Ge)
  * 2019/4/17 20:25
  * Contact QQ: 657270652
  * Version: 1.0
  * More learning material: https://blog.csdn.net/coderblack/
  * Description: DSP user-profile job — merges per-device ID tags and
  * keyword/interest tags per user (guid) from the id log.
  **/
object DspUserProfile {

  /** The device-ID tag categories tracked per user. */
  private val IdKeys = List("imei", "idfa", "androidid")

  /**
    * Merges duplicate tag entries by tag value, summing their weights.
    *
    * e.g. List(("a",1.0),("a",2.0),("b",1.0)) => List(("a",3.0),("b",1.0))
    * (entry order after groupBy is unspecified, matching the original code).
    *
    * @param tags raw (tagValue, weight) pairs, possibly with duplicates
    * @return one (tagValue, summedWeight) pair per distinct tag value
    */
  private def mergeScores(tags: List[(String, Double)]): List[(String, Double)] =
    tags
      .groupBy(_._1)
      .map { case (tag, entries) => tag -> entries.map(_._2).sum }
      .toList

  /**
    * Combines the device-ID tag maps of two beans for the same guid.
    * Weights of identical ID values are summed; all three ID categories
    * are always present in the result (possibly with empty lists),
    * matching the original behavior.
    */
  def combineIdsTags(b1: DspUserTagBean, b2: DspUserTagBean): Map[String, List[(String, Double)]] =
    IdKeys.map { key =>
      key -> mergeScores(b1.ids.getOrElse(key, Nil) ++ b2.ids.getOrElse(key, Nil))
    }.toMap

  /**
    * Combines the keyword ("interests") tags of two beans for the same guid,
    * summing weights of identical keywords.
    */
  def combineKwdsTags(b1: DspUserTagBean, b2: DspUserTagBean): Map[String, List[(String, Double)]] = {
    val merged = mergeScores(
      b1.kwdsTags.getOrElse("interests", Nil) ++ b2.kwdsTags.getOrElse("interests", Nil)
    )
    Map("interests" -> merged)
  }

  /**
    * Reduces two beans with the same guid into one, merging ID tags and
    * keyword tags. Used as the combiner for `reduceByKey`.
    * Assumes b1.guid == b2.guid (guaranteed by reduceByKey) — the guid of
    * the first bean is kept.
    */
  def combineBean(b1: DspUserTagBean, b2: DspUserTagBean): DspUserTagBean = {
    val idsTags: Map[String, List[(String, Double)]] = combineIdsTags(b1, b2)
    val kwdsTags: Map[String, List[(String, Double)]] = combineKwdsTags(b1, b2)

    // TODO: device / ad / location tag merging not yet implemented:
    // combineDeviceTags(b1, b2); combineAdTags(b1, b2); combineLocTags(b1, b2)

    DspUserTagBean(b1.guid, idsTags, kwdsTags)
  }

  def main(args: Array[String]): Unit = {
    // NOTE(review): master is hard-coded to local[*] and the input path is a
    // local Windows path — fine for a dev run, should be parameterized for prod.
    val spark = SparkSession.builder()
      // getSimpleName on a Scala object's class ends with '$'; strip it for a clean app name.
      .appName(DspUserProfile.getClass.getSimpleName.stripSuffix("$"))
      .master("local[*]")
      .getOrCreate()

    val df = spark.read.parquet("G:\\data_shark\\testdata\\usertags\\idlog")
    df.show(10, false)

    // One bean per input row: each non-blank ID column becomes a tag with
    // initial weight 1.0; keywords are split on spaces into "interests" tags.
    val beans: RDD[DspUserTagBean] = df.rdd.map(row => {
      val guid = row.getAs[String]("guid")
      val keywords = row.getAs[String]("keywords")

      val idsMap: Map[String, List[(String, Double)]] =
        IdKeys
          .map(key => key -> row.getAs[String](key))
          .collect { case (key, value) if StringUtils.isNotBlank(value) =>
            key -> List((value, 1d))
          }
          .toMap

      val kwdsMap: Map[String, List[(String, Double)]] =
        if (StringUtils.isNotBlank(keywords))
          Map("interests" -> keywords.split(" ").map((_, 1d)).toList)
        else
          Map.empty

      DspUserTagBean(guid, idsMap, kwdsMap)
    })

    // Merge all rows belonging to the same guid into a single profile bean.
    val profiles = beans
      .map(bean => (bean.guid, bean))
      .reduceByKey(combineBean)
      .map(_._2)

    profiles.take(10).foreach(println)

    spark.close()
  }

}
