package cn.lagou.spark.graphX

import org.apache.spark.graphx.{Edge, Graph, VertexId}
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Minimal GraphX walkthrough: builds a tiny airport graph (vertices = airport
 * codes, edge attr = distance), then prints vertices, edges, triplets, counts,
 * and the routes longer than 1000 sorted by distance descending.
 */
object GraphXDemo {
  def main(args: Array[String]): Unit = {
    // Initialize the Spark context (local mode, all cores).
    val conf: SparkConf = new SparkConf()
      .setAppName(this.getClass.getCanonicalName)
      .setMaster("local[*]")
    val sc = new SparkContext(conf)
    sc.setLogLevel("warn")

    // Define vertices: (VertexId, airport code)
    val vertexArray: Array[(VertexId, String)] = Array(
      (1L, "SFO"),
      (2L, "ORD"),
      (3L, "DFW")
    )
    // Define edges: Edge(srcId, dstId, distance)
    val edgeArray: Array[Edge[Int]] = Array(
      Edge(1L, 2L, 1800),
      Edge(2L, 3L, 800),
      Edge(3L, 1L, 1400)
    )
    // Build the vertex/edge RDDs and the Graph[VD, ED]
    val vertexRDD: RDD[(Long, String)] = sc.makeRDD(vertexArray)
    val edgeRDD: RDD[Edge[Int]] = sc.makeRDD(edgeArray)
    val graph: Graph[String, Int] = Graph(vertexRDD, edgeRDD)

    // All vertices
    println("所有的顶点：")
    graph.vertices.foreach(println)
    // All edges
    println("\n所有的边：")
    graph.edges.foreach(println)

    // All triplets (src attr, edge attr, dst attr)
    println("\n所有的triplets：")
    graph.triplets.foreach(println)

    // Vertex count
    println(s"\n顶点数：${graph.vertices.count()}")

    // Edge count
    println(s"\n边数：${graph.edges.count()}")

    // Routes with distance > 1000: report how many there are, then list them
    // sorted by distance DESCENDING.
    // Fixes vs. original:
    //  - sortBy(_.attr) sorted ascending although the comment asked for 降序;
    //    use ascending = false.
    //  - foreach(println) ran on executors, printing partitions in arbitrary
    //    order so the sort was not visible; collect() to the driver first.
    //  - the count ("有几个") was never printed.
    println("\n机场距离大于1000：")
    val longRoutes: Array[Edge[Int]] = graph.edges
      .filter(edge => edge.attr > 1000)
      .sortBy(_.attr, ascending = false)
      .collect()
    println(s"共 ${longRoutes.length} 条")
    longRoutes.foreach(println)

    sc.stop()

  }
}
