package profile.dsplog

import org.apache.commons.lang3.StringUtils
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.SparkSession

import scala.collection.mutable.ArrayBuffer

/**
  * Created by hunter.coder (Tao Ge)
  * 2019/4/16 15:57
  * Contact QQ: 657270652
  * Version: 1.0
  * More learning material: https://blog.csdn.net/coderblack/
  * Description: builds per-user dsp tag profiles by merging the id and
  * keyword weight lists of all records that share the same guid.
  **/
object DspUserTag {

  /**
    * Merges two tag beans belonging to the same guid into one.
    *
    * For every id type (imei / idfa / androidid) the id lists of both beans
    * are concatenated and the weights of identical id values are summed:
    *   List((b1,1.0),(b2,1.0)) ++ List((b1,1.0),(b3,1.0))
    *     => List((b1,2.0),(b2,1.0),(b3,1.0))
    *
    * Bug fixed: the original used `bean.ids ++ x.ids` on two Maps, which
    * silently REPLACES bean's list for any id type present in both beans, so
    * shared id types were never actually merged (the dead ArrayBuffer code it
    * carried was the abandoned correct approach). Converting to lists first
    * keeps every entry. The original also emitted guid "" — the shared guid
    * is now preserved.
    *
    * @param bean left-hand bean
    * @param x    right-hand bean (assumed to carry the same guid as `bean`)
    * @return a new bean with the shared guid and the merged id map
    */
  def combine(bean: DspUserTagBean, x: DspUserTagBean): DspUserTagBean = {
    // Keep duplicates: Map ++ Map would drop bean's entry for shared keys.
    val idsAll: List[(String, List[(String, Double)])] =
      bean.ids.toList ++ x.ids.toList

    val merged: Map[String, List[(String, Double)]] = idsAll
      .groupBy(_._1)              // group entries of the same id type
      .mapValues(_.flatMap(_._2)) // concatenate the per-type id lists
      .mapValues { lst =>
        // sum the weights of identical id values within one id type
        lst.groupBy(_._1).map { case (id, hits) => (id, hits.map(_._2).sum) }.toList
      }
      .map(identity)              // materialize (mapValues is a lazy view)

    // NOTE(review): the keyword map (third field) is still not merged — it is
    // kept as null exactly like the original implementation. TODO merge it too.
    DspUserTagBean(bean.guid, merged, null)
  }

  /**
    * Scratch/demo method (not the Spark entry point): walks through the
    * groupBy + reduce merge logic on plain in-memory collections so the
    * transformation used by [[combine]] can be inspected step by step.
    * Behavior (printed output) is unchanged from the original.
    */
  def main2(args: Array[String]): Unit = {
    val m1 = Map("a" -> 1d, "b" -> 1d)
    val m2 = Map("a" -> 1d, "c" -> 1d)

    // Copying both maps into a buffer keeps duplicate keys side by side.
    val bf = ArrayBuffer.empty[(String, Double)]
    m1.copyToBuffer(bf)
    m2.copyToBuffer(bf)
    val s: Map[String, ArrayBuffer[(String, Double)]] = bf.groupBy(_._1)
    println(s)

    // Sum the weights of entries sharing a key.
    val res = s.mapValues(iter => iter.reduce((x, y) => {
      (x._1, x._2 + y._2)
    }))
    println(res)

    // Same exercise one level deeper: values are themselves (id, weight) lists.
    val m11 = Map("a" -> List(("a1", 1d)), "b" -> List(("b1", 1d), ("b2", 1d)))
    val m12 = Map("a" -> List(("a1", 1d)), "b" -> List(("b1", 1d), ("b3", 1d)))

    val bf1 = ArrayBuffer.empty[(String, List[(String, Double)])]

    m11.copyToBuffer(bf1)
    m12.copyToBuffer(bf1)

    val ss: Map[String, ArrayBuffer[(String, List[(String, Double)])]] = bf1.groupBy(_._1)
    // Map(b -> ArrayBuffer((b,List((b1,1.0), (b2,1.0))), (b,List((b1,1.0), (b3,1.0)))), a -> ArrayBuffer((a,List((a1,1.0))), (a,List((a1,1.0)))))
    println(ss)

    // Concatenate the lists of entries sharing a key.
    val s2 = ss.mapValues(arr => arr.reduce((tp1, tp2) => {
      (tp1._1, tp1._2.++(tp2._2))
    }))
    // Map(b -> (b,List((b1,1.0), (b2,1.0), (b1,1.0), (b3,1.0))), a -> (a,List((a1,1.0), (a1,1.0))))
    println(s2)

    // Finally collapse duplicate ids by summing their weights.
    val s3 = s2.mapValues(tp => {
      val lst = tp._2
      // List((b1,1.0), (b2,1.0), (b1,1.0), (b3,1.0)))
      val tmp = lst.groupBy(_._1).map(tp => (tp._1, tp._2.map(_._2).reduce(_ + _))).toList

      (tp._1, tmp)
    })

    println(s3)
  }

  /**
    * Spark entry point: reads raw dsp log lines of the form
    *   guid,imei:idfa:androidid,kwd1 kwd2 ...
    * builds one [[DspUserTagBean]] per line and merges all beans that share
    * a guid with [[combine]].
    */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .appName(DspUserTag.getClass.getSimpleName)
      .master("local[*]")
      .getOrCreate()

    // TODO(review): hard-coded Windows path — should come from args.
    val ds = spark.read.textFile("G:\\data_shark\\testdata\\usertags\\input\\1.dat")
    //val ds = spark.read.textFile("G:\\sharkdata\\log_guid")

    val beanRdd: RDD[DspUserTagBean] = ds.rdd.map { line =>
      val fields = line.split(",")
      val guid = fields(0)
      val ids = fields(1).split(":", -1)   // keep empty slots (-1 limit)
      val kwds = fields(2).split(" ", -1)
      // every keyword starts with weight 1.0
      val kwdsList = kwds.map((_, 1d)).toList

      // pair the positional id fields with their names, drop blank ids
      val idsMap = Array("imei", "idfa", "androidid")
        .zip(ids)
        .filter(tp => StringUtils.isNotBlank(tp._2))
        .map(tp => (tp._1, List((tp._2, 1d))))
        .toMap

      DspUserTagBean(guid, idsMap, Map("interestKwds" -> kwdsList))
    }
    /**
      * DspUserTagBean(1,Map(imei -> List((imei011,1.0)), idfa -> List((idfa01,1.0)), androidid -> List((android01,1.0))),Map(interestKwds -> List((醇品,1.0), (咖啡,1.0), (益达,1.0))))
      * DspUserTagBean(1,Map(imei -> List((imei012,1.0))),Map(interestKwds -> List((护手霜,1.0), (益达,1.0), (醇品,1.0))))
      * DspUserTagBean(2,Map(imei -> List((imei02,1.0)), idfa -> List((idfa02,1.0))),Map(interestKwds -> List((网球拍,1.0), (spark,1.0), (大数据,1.0))))
      * DspUserTagBean(2,Map(imei -> List((imei02,1.0)), androidid -> List((andoridid02,1.0))),Map(interestKwds -> List((网球拍,1.0), (spark,1.0), (大数据,1.0))))
      */

    // reduceByKey merges map-side before the shuffle; the original
    // groupBy(...).mapValues(reduceRight) shuffled whole groups across the
    // cluster before merging anything.
    val value = beanRdd.map(b => (b.guid, b)).reduceByKey(combine)

    value.take(10).foreach(println)

    spark.stop()
  }

}
