import org.apache.spark.graphx.{Edge, EdgeTriplet, Graph, VertexId, VertexRDD}
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Minimal GraphX example: builds a 3-airport graph (vertices = airport codes,
 * edge attributes = distances), prints vertex/edge/triplet counts and contents,
 * then lists routes longer than 1000 sorted by distance in descending order.
 */
object GraphXEx1 {
  def main(args: Array[String]): Unit = {
    val conf: SparkConf = new SparkConf().setAppName(this.getClass.getCanonicalName)
      .setMaster("local[*]")
    val sc = new SparkContext(conf)
    sc.setLogLevel("warn")

    // Define the vertices: (vertexId, airport code)
    val vertexArray: Array[(VertexId, String)] = Array(
      (1L, "SFO"),
      (2L, "ORD"),
      (3L, "DFW")
    )
    val vertexRDD: RDD[(VertexId, String)] = sc.makeRDD(vertexArray)

    // Define the edges: Edge(srcId, dstId, distance)
    val edgeArray: Array[Edge[Int]] = Array(
      Edge(1L, 2L, 1800),
      Edge(2L, 3L, 800),
      Edge(3L, 1L, 1400)
    )
    val edgeRDD: RDD[Edge[Int]] = sc.makeRDD(edgeArray)

    // Build the property graph from vertex and edge RDDs
    val graph: Graph[String, Int] = Graph(vertexRDD, edgeRDD)

    println(s"Number of vertices: ${graph.vertices.count()}")
    graph.vertices.foreach(println)
    println(s"Number of edges: ${graph.edges.count()}")
    graph.edges.foreach(println)
    println(s"Number of triplets: ${graph.triplets.count()}")
    graph.triplets.foreach(println)

    println("Routes with distance > 1000, sorted by distance (descending):")
    // repartition(1) keeps output ordered when printing from a single partition.
    // BUG FIX: sortBy defaults to ascending; the message promises descending,
    // so pass ascending = false explicitly.
    graph.triplets
      .filter(t => t.attr > 1000)
      .repartition(1)
      .sortBy(_.attr, ascending = false)
      .foreach(println)

    sc.stop()
  }
}
