package com.yanduo.graphx

import org.apache.spark.graphx.{Edge, Graph, VertexId, VertexRDD}
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
  * 用户标签合并
  *
  * @author Gerry chan
  * date 2020/5/30 19:16
  * @version 1.0
  */
object UserTagsMergeDemo {
  def main(args: Array[String]): Unit = {
    val sparkConf = new SparkConf()
      .setAppName(s"${this.getClass.getSimpleName}")
      .setMaster("local[*]")

    val sc = new SparkContext(sparkConf)

    // Read input: each line is a tab-separated record mixing user names and
    // "tag:count" tokens (tokens containing ':' are tags, the rest are names).
    val data: RDD[Array[String]] = sc.textFile(args(0)).map(_.split("\t"))

    // Build the vertex set: (vertexId, (userName, tagList)).
    val uv: RDD[(VertexId, (String, List[(String, Int)]))] = data.flatMap { arr =>
      // Split the record into names and tags.
      val userNames = arr.filter(_.indexOf(":") == -1)
      // Parse each "tag:count" token into a (tag, count) pair.
      val userTags: List[(String, Int)] = arr
        .filter(_.indexOf(":") != -1)
        .map { kvs =>
          val kv = kvs.split(":")
          (kv(0), kv(1).toInt)
        }
        .toList

      // To avoid double-counting tags within one record, only the first name
      // carries the tag list; the remaining names carry an empty list.
      // (Original had an unbalanced ')' here, which broke compilation.)
      userNames.map { name =>
        if (name == userNames(0)) (name.hashCode.toLong, (name, userTags))
        else (name.hashCode.toLong, (name, List.empty[(String, Int)]))
      }
    }

    // Build the edge set: connect the record's first name to every name in the
    // record (the self-edge is harmless for connectedComponents).
    val ue: RDD[Edge[Int]] = data.flatMap { arr =>
      val userNames = arr.filter(_.indexOf(":") == -1)
      userNames.map(name => Edge(userNames(0).hashCode.toLong, name.hashCode.toLong, 0))
    }

    // NOTE(review): String.hashCode collisions would silently merge unrelated
    // users — acceptable for a demo, but assign real ids in production.
    val graph = Graph(uv, ue)
    // For each vertex: (vertexId, smallest vertexId of its connected component).
    val cc: VertexRDD[VertexId] = graph.connectedComponents().vertices

    // Merge per component: union the names (Set deduplicates) and sum tag
    // counts grouped by tag name.
    val merged = cc
      .join(uv)
      .map { case (_, (cmId, (name, tags))) => (cmId, (Set(name), tags)) }
      .reduceByKey { case ((names1, tags1), (names2, tags2)) =>
        val summedTags = (tags1 ++ tags2)
          .groupBy(_._1)
          .mapValues(_.foldLeft(0)(_ + _._2))
          .toList
        (names1 ++ names2, summedTags)
      }
      .map(t => (t._2._1, t._2._2)) // names are already a Set; .toSet was redundant

    // Trigger the lazy pipeline and show the result — without an action the
    // computation above would never execute.
    merged.collect().foreach(println)

    sc.stop()
  }
}
