package cn.bigdata.graphx.job

import org.apache.spark.graphx.{Edge, Graph, VertexId}
import org.apache.spark.{SparkConf, SparkContext}

/**
 * GraphX demo: builds a tiny 3-airport flight graph and prints its
 * vertices, edges, triplets, counts, and edges sorted by distance.
 */
object Case1 {

  // NOTE(review): currently unused in this job; kept for API compatibility.
  case class User(name: String, age: Int, inDegrees: Int, outDegrees: Int)

  def main(args: Array[String]): Unit = {
    // `.init` strips the trailing '$' from the Scala object's class name.
    val conf = new SparkConf().setMaster("local").setAppName(this.getClass.getName.init)
    val sc = new SparkContext(conf)
    sc.setLogLevel("WARN")

    // Vertex data: (vertexId, airport code)
    val vertexArray: Array[(VertexId, String)] = Array(
      (1L, "SFO"),
      (2L, "ORD"),
      (3L, "DFW")
    )

    // Edge data: attr is the flight distance between the two airports
    val edgeArray: Array[Edge[Int]] = Array(
      Edge(1L, 2L, 1800),
      Edge(2L, 3L, 800),
      Edge(3L, 1L, 1400)
    )

    val vertexRDD = sc.makeRDD(vertexArray)
    val edgeRDD = sc.makeRDD(edgeArray)

    // Build the property graph from the vertex and edge RDDs
    val graph = Graph(vertexRDD, edgeRDD)

    // collect() first so printing happens on the driver — with a distributed
    // master, a bare foreach(println) would print on the executors instead.
    println("所有顶点")
    graph.vertices.collect().foreach(println)
    println("所有边")
    graph.edges.collect().foreach(println)
    println("所有的triplets")
    graph.triplets.collect().foreach(println)
    // Use string interpolation: println(a, b) auto-tuples and prints "(label,3)".
    println(s"求顶点数 ${graph.vertices.count()}")
    println(s"所有边数 ${graph.edges.count()}")
    println(s"求机场距离大于1000的个数 ${graph.edges.filter(edge => edge.attr > 1000).count()}")

    println("按所有机场的距离排序降序")
    println("升序")
    graph.edges.sortBy(_.attr).collect().foreach(println)
    println("降序")
    graph.edges.sortBy(_.attr, ascending = false).collect().foreach(println)

    sc.stop()
  }
}
