package org.yonggan.dmp.graphx

import org.apache.log4j.{Level, Logger}
import org.apache.spark.graphx.{Edge, Graph, VertexId}
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
  * 共同好友
  * 使用Spark 连通图计算测试demo
  */
/**
  * Common-friends demo: uses Spark GraphX `connectedComponents` to
  * partition a small friendship graph into its connected sub-graphs
  * and print the members of each component.
  *
  * @author Mr.zhao
  *         2018/6/4 23:11
  */
object ComFriends {

  // Suppress Spark's chatty internal logging so the demo output is readable.
  // (Was Level.DEBUG, which floods the console with framework log lines.)
  Logger.getLogger("org").setLevel(Level.WARN)

  def main(args: Array[String]): Unit = {
    val sparkConf = new SparkConf()
    sparkConf.setAppName("共同好友")
    sparkConf.setMaster("local[*]")

    val sc = new SparkContext(sparkConf)
    try {
      // Vertex set: RDD[(VertexId, name)]
      val vertexRDD: RDD[(VertexId, String)] = sc.makeRDD(Seq(

        (1L, "zhangwuji"),
        (2L, "zhaomin"),
        (9L, "zhuer"),
        (6L, "zhangcuisham"),
        (133L, "zhangsanfeng"),

        (138L, "jinmaoshiwang"),
        (16L, "baimeiyingwang"),
        (44L, "zhouzhiruo"),
        (21L, "susu"),

        (158L, "yangxiao"),
        (7L, "yangbuhui"),
        (5L, "jixiaofu")
      ))

      // Edge set: friendship links. The edge attribute is unused, so 0.
      // Note vertex 6 links both the 133 group and the 138 group, merging
      // them into a single component; {5, 7, 158} forms the other one.
      val edgesRDD: RDD[Edge[Int]] = sc.makeRDD(Seq(
        Edge(1, 133, 0),
        Edge(2, 133, 0),
        Edge(9, 133, 0),
        Edge(6, 133, 0),

        Edge(6, 138, 0),
        Edge(21, 138, 0),
        Edge(16, 138, 0),
        Edge(44, 138, 0),

        Edge(5, 158, 0),
        Edge(7, 158, 0)
      ))

      // Build the property graph.
      val graph: Graph[String, Int] = Graph(vertexRDD, edgesRDD)

      /**
        * connectedComponents finds the connected sub-graphs (2 here).
        * Its `vertices` RDD maps every vertex id to the smallest vertex id
        * in its component, which serves as the component label.
        */
      val cc = graph.connectedComponents().vertices

      // Join the component labels back to the names and group the members
      // per component. collect() is required before printing: foreach on
      // the RDD itself runs println on the executors, so in cluster mode
      // nothing would reach the driver's console.
      cc.join(vertexRDD).map {
        case (id, (minId, name)) => (minId, List((id, name)))
      }.reduceByKey(_ ++ _).collect().foreach(println)
    } finally {
      // Always release the SparkContext, even if the job fails.
      sc.stop()
    }
  }
}
