package spark_graphx.spark_graphx

import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD
import java.lang.Long
import org.apache.spark.graphx.Edge
import org.apache.spark.graphx.Graph
import org.apache.spark.storage.StorageLevel
import org.apache.spark.graphx._

/** Edge attribute carried on each customer→driver edge: one order record.
 *
 *  @param orderID    id of the order (first TSV column)
 *  @param amount     order amount (fourth TSV column)
 *  @param customerIP IP string from the fifth TSV column
 *  @param driverIP   IP string from the sixth TSV column; "" when that column is absent
 */
final case class OrderAttr(orderID: Long, amount: Float, customerIP: String, driverIP: String)

object CustomerDriverGraphX {
  /** Entry point: builds an order graph from a TSV file and writes two result
   *  sets as text files: edges with `amount > 50`, and (src, dst) vertex pairs
   *  that appear on more than one edge.
   *
   *  Optional args (backward-compatible; defaults preserve the original
   *  hard-coded paths):
   *    args(0) — input file URI, args(1) — output directory URI.
   */
  def main(args: Array[String]): Unit = {
    System.setProperty("hadoop.home.dir", "D:/worktools/hadoop-2.7.3")

    val inputPath = if (args.length > 0) args(0) else "file:///D:/文档/graphx/sample-data.txt"
    val outputDir = if (args.length > 1) args(1) else "file:///D:/文档/graphx"

    // Set up the runtime environment: local mode with 2 worker threads.
    val conf = new SparkConf().setAppName("CustomerDriverGraphX").setMaster("local[2]")
    val sc = new SparkContext(conf)

    try {
      val fileRDD: RDD[String] = sc.textFile(inputPath, 2)

      // TSV columns: orderID, customer id, driver id, amount, customerIP[, driverIP]
      // (presumably — inferred from OrderAttr field mapping; confirm against the data file).
      // Vertex ids are disambiguated by prefixing the customer id with "1" and the
      // driver id with "2" before parsing as Long, so the two id spaces never collide.
      val edges: RDD[Edge[OrderAttr]] = fileRDD.map { line =>
        val fields: Array[String] = line.split("\t")
        // ">= 6" (was "== 6") so extra trailing columns no longer blank out driverIP.
        val driverIP = if (fields.length >= 6) fields(5) else ""
        Edge(
          ("1" + fields(1)).toLong,
          ("2" + fields(2)).toLong,
          OrderAttr(fields(0).toLong, fields(3).toFloat, fields(4), driverIP))
      }

      // Build Graph[VD, ED]; vertices get default attribute 1, serialized spill storage.
      val graph: Graph[Long, OrderAttr] =
        Graph.fromEdges(edges, 1, StorageLevel.MEMORY_AND_DISK_SER, StorageLevel.MEMORY_AND_DISK_SER)

      // Edge operation: find the edges whose amount attribute is greater than 50.
      println("找出图中amount属性大于50的边：")
      graph.edges.filter(e => e.attr.amount > 50).saveAsTextFile(s"$outputDir/amount-over-50.txt")
      println()

      // Degrees operation: find the largest out-degree, in-degree, and total degree.
      println("找出图中最大的出度、入度、度数：")
      def max(a: (VertexId, Int), b: (VertexId, Int)): (VertexId, Int) =
        if (a._2 > b._2) a else b
      println("max of outDegrees:" + graph.outDegrees.reduce(max) +
        " max of inDegrees:" + graph.inDegrees.reduce(max) +
        " max of Degrees:" + graph.degrees.reduce(max))
      println()

      // Count occurrences of each (src, dst) pair; keep pairs seen more than once.
      val edgeRDD: RDD[((VertexId, VertexId), Long)] = graph.edges.map(e => ((e.srcId, e.dstId), 1))
      edgeRDD.reduceByKey(_ + _).filter(_._2 > 1).saveAsTextFile(s"$outputDir/cc-over-2.txt")
    } finally {
      // Always release the SparkContext, even if a job above fails (was leaked before).
      sc.stop()
    }
  }

}