import org.apache.spark.graphx.{Edge, Graph, VertexId}
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
  * Demo: links users that appear on the same input line, computes connected
  * components with GraphX, and merges each component's user names and
  * tag/weight lists into a single record.
  *
  * @author Super Yang
  * @since 2019-02-18 17:32
  */
object UserTagsMergeDemo {

  /**
    * Entry point.
    *
    * Expected input format (one record per line, whitespace separated):
    * {{{
    *   <ownerName> <friendName> ... <tagName>:<weight> ...
    * }}}
    * Tokens containing ":" are tag:weight pairs belonging to the line's first
    * name; all other tokens are user names. Users appearing on the same line
    * are connected; each connected component's names and tags are merged.
    *
    * @param args args(0) is the input path readable by SparkContext.textFile
    */
  def main(args: Array[String]): Unit = {
    require(args.nonEmpty, "usage: UserTagsMergeDemo <inputPath>")

    val sparkConf = new SparkConf()
      .setAppName(this.getClass.getSimpleName)
      .setMaster("local[*]")
      .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .set("spark.testing.memory", "2147480000")

    val sc = new SparkContext(sparkConf)

    // Split on runs of whitespace (the sample data uses multiple spaces, so a
    // single-space split would yield empty "name" tokens), and drop lines with
    // no user name at all — those would make userNames(0) throw below.
    val data: RDD[Array[String]] = sc.textFile(args(0))
      .map(_.trim.split("\\s+"))
      .filter(_.exists(!_.contains(":")))

    // Vertices: (nameHash, (name, tags)). Only the line's FIRST name carries
    // that line's tag list; every other name on the line gets an empty list.
    // NOTE(review): String.hashCode as a vertex id can collide across distinct
    // names — acceptable for a demo, not for production.
    val uv: RDD[(VertexId, (String, List[(String, Int)]))] = data.flatMap { arr =>
      val userNames = arr.filter(!_.contains(":"))
      val userTags = arr.filter(_.contains(":")).map { kvs =>
        val kv = kvs.split(":")
        (kv(0), kv(1).toInt)
      }.toList
      userNames.map { name =>
        if (name == userNames(0)) (name.hashCode.toLong, (name, userTags))
        else (name.hashCode.toLong, (name, List.empty[(String, Int)]))
      }
    }.reduceByKey { case ((name, t1), (_, t2)) => (name, t1 ++ t2) }
    // ^ A user can occur on many lines; without this pre-merge the join below
    //   emits one row per occurrence and duplicates names in the final output.

    // Edges: line owner -> every name on the line (the owner's self-edge is
    // harmless for connected components).
    val ue: RDD[Edge[Int]] = data.flatMap { arr =>
      val userNames = arr.filter(!_.contains(":"))
      userNames.map(name => Edge(userNames(0).hashCode.toLong, name.hashCode.toLong, 0))
    }

    val graph = Graph(uv, ue)
    // connectedComponents tags every vertex with the lowest vertex id in its
    // component; that id identifies the merged "same user group".
    val cc = graph.connectedComponents().vertices

    cc.join(uv)
      // BUG FIX: group by the COMPONENT id, not the vertex id. The original
      // re-keyed by the vertex's own id, so reduceByKey never merged anything
      // across users in the same component.
      .map { case (_, (componentId, nameAndTags)) => (componentId, nameAndTags) }
      .reduceByKey { case ((n1, t1), (n2, t2)) => (n1 + "," + n2, t1 ++ t2) }
      .collect() // bring results to the driver so println output is visible beyond local mode
      .foreach(println)

    sc.stop()
  }
}
