package graph

import org.apache.spark.graphx.{Edge, Graph, VertexId}
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

// Mutual friends: group users into connected components of a user–phone graph.
object Friend {

  /**
   * Builds a small user–phone graph and prints, for each connected
   * component, the component id paired with the list of member names.
   *
   * Vertices are users ("tom*") and phone numbers ("phone*"); an edge
   * links a user to a phone, so users sharing a phone end up in the
   * same connected component ("mutual friend" groups).
   *
   * @param args unused command-line arguments
   */
  def main(args: Array[String]): Unit = {
    // Local Spark context using all available cores.
    val conf = new SparkConf()
      .setMaster("local[*]")
      .setAppName(s"${this.getClass.getName}")

    val sc = new SparkContext(conf)

    try {
      // Vertex attributes are display names (users and phones mixed).
      val point: RDD[(VertexId, String)] = sc.makeRDD(Seq(
        (1L, "tom"),
        (2L, "tom1"),
        (9L, "tom2"),
        (6L, "tom3"),
        (16L, "tom4"),
        (21L, "tom5"),
        (44L, "tom6"),
        (5L, "tom7"),
        (7L, "tom8"),
        (133L, "phone1"),
        (138L, "phone2"),
        (158L, "phone3")
      ))

      // Edge attribute is a placeholder; only connectivity matters here.
      val edge: RDD[Edge[String]] = sc.makeRDD(Seq(
        Edge(1, 133, "wwwww"),
        Edge(2, 133, "wwwww"),
        Edge(9, 133, "wwwww"),
        Edge(6, 133, "wwwww"),
        Edge(5, 138, "wwwww"),
        Edge(6, 138, "wwwww"),
        Edge(21, 138, "wwwww"),
        Edge(44, 138, "wwwww"),
        Edge(16, 138, "wwwww"),
        Edge(5, 158, "wwwww"),
        Edge(7, 158, "wwwww")
      ))

      val graph = Graph(point, edge)

      // Each vertex mapped to its component id (the smallest vertex id
      // reachable from it).
      val ver = graph.connectedComponents().vertices

      // Join component ids back to names, group names per component, and
      // print on the driver (collect() first so output isn't emitted on
      // executor JVMs).
      ver.join(point)
        .map { case (_, (componentId, name)) => (componentId, List(name)) }
        .reduceByKey(_ ++ _)
        .collect()
        .foreach(println)
    } finally {
      // Always release the SparkContext, even if a job above fails.
      sc.stop()
    }
  }
}
