package com.haozhen.stream.homework

/**
  * @author haozhen
  * @email haozh@ync1.com
  * @date 2021/1/16  0:43
  */
object GraphXwork {

  /**
    * GraphX demo: builds a tiny airport graph (vertices = airports,
    * edge attribute = distance) and runs a few basic queries:
    * listing vertices/edges/triplets, counting them, filtering edges
    * by distance, and sorting edges by distance descending.
    */
  def main(args: Array[String]): Unit = {
    import org.apache.spark.graphx.{Edge, Graph}
    import org.apache.spark.{SparkConf, SparkContext}

    // stripSuffix("$") removes the trailing '$' of a Scala object's class
    // name; unlike the original `.init` it is a no-op if no '$' is present,
    // so it cannot silently truncate a valid name.
    val conf: SparkConf = new SparkConf()
      .setAppName(this.getClass.getCanonicalName.stripSuffix("$"))
      .setMaster("local[*]")

    val sc = new SparkContext(conf)
    sc.setLogLevel("WARN")

    try {
      // Vertices: (vertexId, airport code). Uppercase 'L' literals — the
      // lowercase 'l' suffix is deprecated because it reads like the digit 1.
      val vertexArray = Array((1L, "SFO"), (2L, "ORD"), (3L, "DFW"))

      // Edges: srcId -> dstId with the flight distance as the attribute.
      val edgeArray = Array(
        Edge(1L, 2L, 1800),
        Edge(2L, 3L, 800),
        Edge(3L, 1L, 1400)
      )

      val vertexRDD = sc.makeRDD(vertexArray)
      val edgeRDD = sc.makeRDD(edgeArray)

      val graph = Graph(vertexRDD, edgeRDD)

      println("--------")
      // All vertices
      graph.vertices.foreach(println)
      println("--------")
      // All edges
      graph.edges.foreach(println)
      println("--------")
      // All triplets (source vertex, edge attribute, destination vertex)
      graph.triplets.foreach(println)
      println("--------")
      // Vertex count — `val`, never reassigned
      val vertexCount = graph.vertices.count()
      println(s"顶点数:  $vertexCount")
      println("--------")
      // Edge count
      val edgeCount = graph.edges.count()
      println(s"边数： $edgeCount")
      println("--------")
      // Edges with distance greater than 1000
      graph.edges.filter(_.attr > 1000).foreach(println)
      println("--------")
      // All edges sorted by distance, descending
      graph.edges.sortBy(-_.attr).foreach(println)
    } finally {
      // Always release the SparkContext, even if a job above fails.
      sc.stop()
    }
  }
}
