package com.lagou.no2

import org.apache.log4j.{Level, Logger}
import org.apache.spark.graphx.{Edge, EdgeTriplet, Graph, VertexId}
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

object No2 {
  def main(args: Array[String]): Unit = {
    // Quiet Spark's internal logging down to errors only
    Logger.getLogger("org").setLevel(Level.ERROR)
    val conf: SparkConf = new SparkConf()
      .setAppName(this.getClass.getCanonicalName)
      .setMaster("local[*]")
    val sc = new SparkContext(conf)

    // Vertices: (VertexId, airport code)
    val vertexArray: Array[(VertexId, String)] = Array(
      (1L, "SFO"),
      (2L, "ORD"),
      (3L, "DFW")
    )
    // Edges: Edge(srcId, dstId, distance between airports)
    val edgeArray: Array[Edge[Int]] = Array(
      Edge(1L, 2L, 1800),
      Edge(2L, 3L, 800),
      Edge(3L, 1L, 1400)
    )

    // Build vertexRDD and edgeRDD backing the graph
    val vertexRDD: RDD[(VertexId, String)] = sc.makeRDD(vertexArray)
    val edgeRDD: RDD[Edge[Int]] = sc.makeRDD(edgeArray)
    // Construct Graph[VD, ED]
    val graph: Graph[String, Int] = Graph(vertexRDD, edgeRDD)

    // All vertices
    graph.vertices.foreach(println)
    println("************************************")
    // All edges
    graph.edges.foreach(println)
    println("************************************")
    // All triplets
    graph.triplets.foreach(println)
    println("************************************")
    // Vertex count
    println(s"顶点数量为：${graph.vertices.count}")
    // Edge count (fix: label previously said "顶点数量" — vertex count — for the edge count)
    println(s"边数量为：${graph.edges.count}")
    println("************************************")
    // Routes with distance > 1000, sorted descending by distance
    val arr: Array[EdgeTriplet[String, Int]] = graph.triplets
      .filter(_.attr > 1000)
      .sortBy(_.attr, ascending = false)
      .collect()
    println(arr.toBuffer)

    // Release the SparkContext so the app exits cleanly
    sc.stop()
  }
}
