package graph

import org.apache.spark.graphx.{Edge, Graph}
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

// Common-friends example 2: group names into connected components of the
// friendship graph and print each component's member list.
object Friend2 {
  def main(args: Array[String]): Unit = {
    // Spark context (local mode for this demo).
    val conf = new SparkConf()
    conf.setAppName(s"${this.getClass.getName}")
    conf.setMaster("local[*]")

    val sc = new SparkContext(conf)

    try {
      // Input path: first CLI arg if given, else the original hard-coded file.
      // Each line is tab-separated names; the first name is the line's "owner".
      val path = args.headOption.getOrElse("C:\\Users\\44323\\Desktop\\资料PDF\\图计算案例2.txt")
      val lines = sc.textFile(path)

      // Vertex set: (id, name), deduplicated.
      // NOTE: String.hashCode can collide — acceptable for a demo, but use a
      // proper id assignment (e.g. zipWithUniqueId) for real data.
      val point: RDD[(Long, String)] = lines.flatMap { line =>
        val names = line.split("\t", -1) // Array(owner, friend1, friend2, ...)
        names.map(name => (name.hashCode.toLong, name))
      }.distinct()

      // Edge set: connect the line's first name to every OTHER name on the line.
      // `tail` drops the self-loop (head -> head) the original version emitted.
      val edge: RDD[Edge[Int]] = lines.flatMap { line =>
        val names = line.split("\t", -1)
        names.tail.map(name => Edge(names.head.hashCode.toLong, name.hashCode.toLong, 0))
      }

      // Build the graph, label each vertex with its connected-component id,
      // then collect the names belonging to each component.
      val graph = Graph(point, edge)
      val ver = graph.connectedComponents().vertices
      ver.join(point)
        .map { case (_, (componentId, name)) => (componentId, name) }
        .reduceByKey((a, b) => a.concat(",").concat(b))
        .foreach(println)
    } finally {
      // Always release Spark resources, even if the job above fails.
      sc.stop()
    }
  }
}
