package SparkGraphXInAction

import org.apache.spark._
import org.apache.log4j.{Level, Logger}
import org.apache.spark.SparkContext
import org.apache.spark.SparkContext._
import org.apache.spark.SparkConf
import org.apache.spark.graphx._
import org.apache.spark.graphx
import org.apache.spark.graphx.Graph._
import org.apache.spark.rdd.RDD
import org.apache.spark.graphx.util.GraphGenerators

/**
  * Created by Administrator on 2017/4/25 0025.
  */
object TestGreedy {

  /**
    * Greedy (nearest-neighbour) walk over a weighted graph, starting at `origin`.
    *
    * At each step, among all edges that (a) have not been traversed yet and
    * (b) connect the current vertex to a not-yet-visited vertex (in either
    * direction), the edge with the smallest weight is taken. The walk stops
    * when no such edge exists.
    *
    * @param g      input graph whose edge attribute is the edge weight
    * @param origin vertex id at which the walk starts
    * @return a graph whose vertex attribute is `true` iff the vertex was visited,
    *         and whose edge attribute is `(weight, traversed)`
    */
  def greedy[VD](g: Graph[VD, Double], origin: VertexId): Graph[Boolean, (Double, Boolean)] = {
    // Vertex attr: visited flag (only the origin starts as true).
    // Edge attr: (weight, traversed flag) — no edge is traversed initially.
    var g2: Graph[Boolean, (Double, Boolean)] =
      g.mapVertices((vid, _) => vid == origin).mapTriplets(et => (et.attr, false))
    var nextVertexId = origin
    var edgesAreAvailable = true
    type TripletType = EdgeTriplet[Boolean, (Double, Boolean)]
    do {
      // An edge is available if it has not been traversed yet and it leads from
      // the current vertex to an unvisited vertex (checked in both directions,
      // since the walk treats edges as undirected).
      val availableEdges = g2.triplets.filter { et =>
        !et.attr._2 &&
          ((et.srcId == nextVertexId && !et.dstAttr) ||
            (et.dstId == nextVertexId && !et.srcAttr))
      }.cache() // cached: consumed by both count() and min() below — without
                // caching the filter over all triplets would run twice per step

      edgesAreAvailable = availableEdges.count > 0
      if (edgesAreAvailable) {
        // Pick the available edge with the smallest weight.
        val smallestEdge =
          availableEdges.min()(Ordering.by((et: TripletType) => et.attr._1))
        // Move to the other endpoint of the chosen edge.
        nextVertexId =
          Seq(smallestEdge.srcId, smallestEdge.dstId).filter(_ != nextVertexId).head
        // Mark the new vertex as visited and the chosen edge as traversed.
        // Edge identity is matched on (srcId, dstId), which is stable because
        // GraphX edges keep their original direction.
        g2 = g2.mapVertices((vid, visited) => visited || vid == nextVertexId)
          .mapTriplets { et =>
            (et.attr._1,
              et.attr._2 ||
                (et.srcId == smallestEdge.srcId && et.dstId == smallestEdge.dstId))
          }
      }
      // Release the per-iteration cache so executors don't accumulate old RDDs.
      availableEdges.unpersist(blocking = false)
    } while (edgesAreAvailable)
    g2
  }

  /**
    * Entry point: builds a small example graph, runs the greedy walk from
    * vertex 1, and prints every resulting triplet.
    */
  def main(args: Array[String]): Unit = {
    // Silence noisy framework logging so the triplet output is readable.
    Logger.getLogger("org.apache.spark").setLevel(Level.WARN)
    Logger.getLogger("org.eclipse.jetty.server").setLevel(Level.OFF)

    // Local single-threaded Spark context for the example.
    val conf = new SparkConf().setAppName("SimpleGraphX").setMaster("local")
    val sc = new SparkContext(conf)
    try {
      // Build the example graph: 7 labelled vertices, weighted edges.
      val myVertices = sc.makeRDD(Array((1L, "A"), (2L, "B"), (3L, "C"), (4L, "D"),
        (5L, "E"), (6L, "F"), (7L, "G")))
      val myEdges = sc.makeRDD(Array(Edge(1L, 2L, 7.0), Edge(1L, 4L, 5.0), Edge(2L, 3L, 8.0),
        Edge(2L, 4L, 9.0), Edge(2L, 5L, 7.0), Edge(3L, 5L, 5.0), Edge(4L, 5L, 15.0),
        Edge(4L, 6L, 6.0), Edge(5L, 6L, 8.0), Edge(5L, 7L, 9.0), Edge(6L, 7L, 11.0)))
      val myGraph = Graph(myVertices, myEdges)

      // Run the greedy walk from vertex 1 and print the annotated triplets.
      val g2 = greedy(myGraph, 1L)
      g2.triplets.collect.foreach(println)
    } finally {
      // Always release the SparkContext, even if the job fails.
      sc.stop()
    }
  }
}
