package com.doit.shark.dsp.test

import org.apache.log4j.{Level, Logger}
import org.apache.spark.graphx.{Edge, Graph, VertexId, VertexRDD}
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.SparkSession

/**
  * Created by hunter.coder (Tao)
  * 2019/4/9 15:42
  * Contact QQ: 657270652
  * Version: 1.0
  * More learning material: https://blog.csdn.net/coderblack/
  * Description: example of the GraphX API — identify which records belong to the same person
  **/
object SamePeople {

  /**
    * GraphX example: identify which input records belong to the same person by
    * linking records through shared phone numbers and WeChat ids, then taking
    * connected components.
    *
    * Input lines look like: "1,13811,wx_a,刘德华" (id, phone, wechat, name).
    *
    * Usage: SamePeople &lt;inputPath&gt; [outputPath]
    * (outputPath is optional; defaults to the original demo path.)
    */
  def main(args: Array[String]): Unit = {

    Logger.getLogger("org").setLevel(Level.WARN)
    val spark = SparkSession.builder().master("local").appName("people").getOrCreate()

    val rdd = spark.read.textFile(args(0)).rdd

    // Build the vertex set (RDD[(VertexId, attr)]).
    // Each input line yields two vertices — one keyed by the phone's hash, one
    // by the wechat id's hash — both carrying the full line as the attribute.
    // NOTE(review): using hashCode.toLong as the vertex id means two different
    // phones/wechat ids that hash-collide would wrongly merge people; fine for
    // a demo, not for production.
    val verticesRDD: RDD[(Long, String)] = rdd.flatMap(line => {
      val split = line.split(",")
      val phone = split(1)
      val wx = split(2)
      (phone.hashCode.toLong, line) :: (wx.hashCode.toLong, line) :: Nil
    })

    // Build the edge set (RDD[Edge[A]]); Edge(srcId, dstId, attr).
    // Each record contributes one edge linking its phone vertex to its wechat vertex.
    val edgeRDD: RDD[Edge[String]] = rdd.map(line => {
      // e.g. "1,13811,wx_a,刘德华"
      val split = line.split(",")
      val phone = split(1)
      val wx = split(2)
      Edge(phone.hashCode.toLong, wx.hashCode.toLong, "")
    })

    // Assemble the graph from the vertex and edge sets.
    val graph = Graph(verticesRDD, edgeRDD)

    // Connected components: each vertex's new attribute is the smallest vertex
    // id of its component — records sharing that id belong to the same person.
    val vertices: VertexRDD[VertexId] = graph.connectedComponents().vertices

    // Join back to recover the original record for every vertex, producing
    // e.g. (46827255, (3663491, "2,13812,wx_a,华仔")).
    val joinRdd = vertices.join(verticesRDD)

    // Output path: optional second CLI argument; falls back to the original
    // hard-coded demo path for backward compatibility.
    val outputPath = if (args.length > 1) args(1) else "G:\\data_shark\\testdata\\graphx\\output"

    // Emit "<componentId>,<originalLine>"; distinct() because every record
    // appears twice (once under its phone vertex, once under its wechat vertex).
    joinRdd.map(tp => s"${tp._2._1},${tp._2._2}").distinct().saveAsTextFile(outputPath)

    spark.close()

  }

}
