package homework2

import org.apache.spark.graphx.{Edge, EdgeRDD, EdgeTriplet, Graph, VertexId, VertexRDD}
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

object Homework2 {

  /**
   * Builds a small 3-airport flight graph (SFO, ORD, DFW with distances as
   * edge attributes) and prints: all vertices, all edges, all triplets, the
   * vertex count, the edge count, and the edges with distance > 1000 sorted
   * by distance descending.
   */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("h2").setMaster("local[*]")
    val sc = new SparkContext(conf)
    // Valid levels are upper-case (ALL, DEBUG, ..., WARN); use the documented form.
    sc.setLogLevel("WARN")

    // Vertices: (airport id, airport code)
    val vertexArray: Array[(VertexId, String)] = Array(
      (1L, "SFO"),
      (2L, "ORD"),
      (3L, "DFW")
    )

    // Edges: src -> dst with the flight distance as the attribute.
    val edgeArray: Array[Edge[Int]] = Array(
      Edge(1L, 2L, 1800),
      Edge(2L, 3L, 800),
      Edge(3L, 1L, 1400)
    )

    val vertexRDD = sc.makeRDD(vertexArray)
    val edgeRDD = sc.makeRDD(edgeArray)

    val graph: Graph[String, Int] = Graph(vertexRDD, edgeRDD)

    // Print all vertices.
    println("所有顶点")
    val allVertices: VertexRDD[String] = graph.vertices
    allVertices.foreach(println(_))
    println("----------------------------------")

    // Print all edges.
    println("所有边")
    val allEdges: EdgeRDD[Int] = graph.edges
    allEdges.foreach(println(_))
    println("----------------------------------")

    // Print all triplets (src vertex, edge attr, dst vertex).
    println("所有triplets")
    val allTriplets: RDD[EdgeTriplet[String, Int]] = graph.triplets
    allTriplets.foreach(println(_))
    println("----------------------------------")

    // Vertex count: use the built-in count() instead of a hand-rolled
    // map/reduce, which would throw on an empty RDD.
    println("求顶点数")
    println(allVertices.count())
    println("----------------------------------")

    // Edge count: same — count() is safe and idiomatic.
    println("求边数")
    println(allEdges.count())
    println("----------------------------------")

    // Edges with distance > 1000, sorted by distance descending.
    println("求机场距离大于1000的边")
    val distRDD = allEdges.filter(edge => edge.attr > 1000).sortBy(edge => edge.attr, ascending = false, numPartitions = 1)
    distRDD.foreach(println(_))

    // Release cluster resources; the original leaked the SparkContext.
    sc.stop()
  }
}
