package com.cloud.core.graphx.demo

import org.apache.spark.graphx.{GraphLoader, VertexId}
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

object Graph_learning_Relation {

  /** GitHub organization whose graph data is analyzed. */
  val orgName = "google_research"

  /** Directory containing edge.txt / vertex.txt for the organization. */
  val local_path = "/Users/fuhaixu/Desktop/work/program_journey/cloud_computing/github_data_analysis/cloud_backend/input_graph/" + orgName

  // Created in init(); kept as a var so main() can stop it in a finally block.
  // NOTE(review): null-initialized var retained for interface compatibility.
  var sc: SparkContext = null
  val master = "local"
  val appName = "Graph_learning_04"

  /** Initializes the SparkContext with the configured master and app name. */
  def init(): Unit = {
    val sparkConf = new SparkConf()
      .setMaster(master)
      .setAppName(appName)

    sc = new SparkContext(sparkConf)
  }

  /**
   * Loads an edge list and a vertex file, joins each vertex's user name with
   * its in-degree, and prints the users sorted by in-degree (descending).
   *
   * vertex.txt lines are assumed to be "username,id" CSV — TODO confirm with
   * the data-generation step.
   */
  def main(args: Array[String]): Unit = {
    init()
    try {
      val graph = GraphLoader.edgeListFile(sc, local_path + "/edge.txt")

      // Parse "username,id" lines into (id, username) keyed by vertex id.
      val users: RDD[(VertexId, String)] = sc.textFile(local_path + "/vertex.txt").map {
        line =>
          val fields = line.split(",")
          (fields(1).toLong, fields(0))
      }
      // Inner join: vertices with no incoming edges are absent from
      // graph.inDegrees and are therefore dropped here.
      val userInDeg = users.join(graph.inDegrees).map {
        case (id, (username, inDegree)) => (id, username, inDegree)
      }
      // Sort by in-degree descending via sortBy's ascending flag
      // (clearer than negating the key).
      val out = userInDeg.sortBy(_._3, ascending = false)
      println(out.collect().mkString("\n"))
    } finally {
      // Fix: the original never stopped the SparkContext (resource leak).
      sc.stop()
    }
  }
}
