package com.wang.dmp.graphx

import org.apache.spark.graphx.{Edge, Graph, VertexId}
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Friend-circle discovery via GraphX connected components.
 *
 * Input file format: one tab-separated line per user, where the first field is
 * the user's name and the remaining fields are that user's friends. Users that
 * are transitively connected through shared friends end up in the same
 * connected component; each component is printed as a comma-separated list of
 * names.
 */
object CommonFriendsPlus {
  def main(args: Array[String]): Unit = {
    // Kryo serialization is faster/more compact than default Java serialization.
    val conf = new SparkConf()
      .setAppName("共同好友推荐")
      .setMaster("local[*]")
      .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
    val sc = new SparkContext(conf)

    // Raw lines split into name arrays; reused to build both vertices and edges.
    val data = sc.textFile("f:\\a.txt").map(_.split("\t"))

    // 1. Vertex set: (vertexId, name) for every name seen on any line.
    //    Cached because it is scanned again in the join after connectedComponents.
    //    NOTE(review): using hashCode as the vertex id can collide and silently
    //    merge distinct names — acceptable for demo data, not for production.
    val vertices: RDD[(Long, String)] = data
      .flatMap(nameArray => nameArray.map(name => (name.hashCode.toLong, name)))
      .cache()

    // 2. Edge set: one edge from the line's user (head) to each of its friends.
    //    Fix: the original mapped over the whole array (head included), which
    //    emitted a useless self-loop Edge(head, head, 0) for every line;
    //    only the tail elements are friends.
    val edges: RDD[Edge[Int]] = data.flatMap(nameArray => {
      val head = nameArray.head.hashCode.toLong
      nameArray.tail.map(name => Edge(head, name.hashCode.toLong, 0))
    })

    // 3. Build the graph. Use the Graph.apply factory — the previously
    //    commented-out `new Graph(...)` is not a public constructor.
    val graph = Graph(vertices, edges)

    // 4. connectedComponents labels every vertex with the smallest vertex id
    //    in its component; joining back to the vertex names and grouping by
    //    that label yields one friend circle per component.
    val cc = graph.connectedComponents().vertices

    cc.join(vertices)
      .map { case (_, (componentMinId, name)) => (componentMinId, Set(name)) } // Set de-duplicates names
      .reduceByKey(_ ++ _)
      .map(_._2.mkString(","))
      .foreach(println)

    // Release Spark resources.
    sc.stop()
  }
}
