package graph

import org.apache.spark.graphx.{Edge, Graph, VertexId}
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

object graph {

  /**
    * Demo: groups vertices into connected components with GraphX and prints,
    * for each component, the list of vertex names belonging to it.
    */
  def main(args: Array[String]): Unit = {
    // SparkContext — local mode, using all available cores
    val conf = new SparkConf().setAppName(s"${this.getClass.getName}").setMaster("local[*]")
    val sc = new SparkContext(conf)

    // Vertex set: (vertexId, vertexName)
    val pointRDD: RDD[(VertexId, String)] = sc.makeRDD(Seq(
      (1L, "s1"),
      (2L, "s2"),
      (9L, "s9"),
      (6L, "s6"),
      (133L, "s133"),
      (138L, "s138"),
      (21L, "s21"),
      (16L, "s16"),
      (44L, "s44"),
      (158L, "s158"),
      (5L, "s5"),
      (7L, "s7")
    ))

    // Edge set: srcId -> dstId with a relationship label
    val edgeRDD: RDD[Edge[String]] = sc.makeRDD(Seq(
      Edge(1, 133, "pengyou"),
      Edge(2, 133, "pengyou"),
      Edge(9, 133, "pengyou"),
      Edge(6, 133, "pengyou"),
      Edge(6, 138, "pengyou"),
      Edge(21, 138, "pengyou"),
      Edge(44, 138, "pengyou"),
      Edge(16, 138, "pengyou"),
      Edge(5, 158, "pengyou"),
      Edge(7, 158, "pengyou")
    ))

    /**
      * connectedComponents assigns every vertex to a component; `vertices`
      * yields (vertexId, componentId) pairs, where the componentId is the
      * smallest vertexId in that vertex's connected component.
      */
    val graph = Graph(pointRDD, edgeRDD)
    val ver = graph.connectedComponents().vertices

    // Join component assignments back to vertex names, then group names by
    // component id and print one (componentId, List(names...)) row per group.
    ver.join(pointRDD).map {
      case (id, (componentId, name)) => (componentId, List(name))
    }.reduceByKey(_ ++ _).foreach(println)

    // Release resources
    sc.stop()
  }
}
