package cn.sheep.dolphin.graphxlearning

import cn.sheep.dolphin.common.DolphinAppComm
import org.apache.spark.graphx.{Edge, Graph}
import org.apache.spark.rdd.RDD

/**
  * author: old sheep
  * QQ: 64341393 
  * Created 2018/12/3
  */
object GoodFriendsPlus {

	/**
	  * Reads tab-separated friend lists (the first name on each line is the
	  * "owner", the remaining names are that person's friends), builds a graph
	  * keyed by `name.hashCode`, runs connected components, and prints the set
	  * of names belonging to each component.
	  */
	def main(args: Array[String]): Unit = {

		val sc = DolphinAppComm.createSparkContext("用户标签合并")

		// Read and split once; cached because the vertex RDD, the edge RDD and
		// the join below would otherwise each re-read and re-split the file.
		// Empty lines are dropped so `names.head` below cannot throw.
		val splitData: RDD[Array[String]] = sc.textFile("F:\\20180715\\DAY14\\simple.data")
			.map(_.split("\t"))
			.filter(_.nonEmpty)
			.cache()

		// Vertex set: RDD[(VertexId, name)].
		// NOTE: using name.hashCode as the vertex id can collide for distinct
		// names — acceptable for a demo dataset, not for production use.
		val vertices: RDD[(Long, String)] = splitData.flatMap { names =>
			names.map(name => (name.hashCode.toLong, name))
		}

		// Edge set: connect the first person on each line to every friend.
		// `names.tail` avoids the redundant self-loop (firstPerson -> firstPerson)
		// that mapping over the full array would create.
		val edges: RDD[Edge[String]] = splitData.flatMap { names =>
			val firstPerson = names.head
			names.tail.map(name => Edge(firstPerson.hashCode.toLong, name.hashCode.toLong, ""))
		}

		// Build the graph.
		val graph = Graph(vertices, edges)

		// Connected components: each vertex gets tagged with the smallest
		// vertex id in its component.
		val cc = graph.connectedComponents().vertices

		// Group names by component id. collect() brings the (small) result to
		// the driver first: foreach(println) directly on an RDD prints on the
		// executors, so on a real cluster the output would never be visible here.
		vertices.join(cc)
			.map { case (_, (name, minId)) => (minId, Set(name)) }
			.reduceByKey(_ ++ _)
			.collect()
			.foreach(println)

		sc.stop()
	}

}
