package hefei

import org.apache.spark._
import org.apache.log4j.{Level, Logger}
import org.apache.spark.{SparkContext, SparkConf}
import org.apache.spark.graphx._
import org.apache.spark.graphx.Graph._
import org.apache.spark.rdd.RDD
import org.apache.spark.graphx.util.GraphGenerators

/**
  * Created by Administrator on 2017/4/21 0021.
  */
/**
  * Counts triangles in the Slashdot social graph, summed per 10k-wide
  * vertex-id band (bands 0..6, i.e. vertex ids 0 until 70000).
  *
  * Usage: TestSlashdot [edgeListPath]  (defaults to "Slashdot0811.txt")
  */
object TestSlashdot {
  def main(args: Array[String]): Unit = {
    // Set up a local Spark runtime environment.
    val conf = new SparkConf().setAppName("SimpleGraphX").setMaster("local")
    val sc = new SparkContext(conf)
    // Allow the edge-list file to be overridden from the command line;
    // default preserves the original hard-coded path.
    val edgeFile = if (args.nonEmpty) args(0) else "Slashdot0811.txt"
    try {
      val g = GraphLoader.edgeListFile(sc, edgeFile).cache()
      // triangleCount() requires canonical edge orientation (srcId < dstId)
      // and a partitioning strategy, so reorient edges before partitionBy.
      val g2 = Graph(
        g.vertices,
        g.edges.map(e => if (e.srcId < e.dstId) e else Edge(e.dstId, e.srcId, e.attr))
      ).partitionBy(PartitionStrategy.RandomVertexCut)
      // Sum per-vertex triangle counts within each 10k-wide vertex-id band.
      // fold(0)(_ + _) instead of reduce: an empty band subgraph would make
      // reduce throw UnsupportedOperationException.
      val bandTotals = (0 to 6).map { i =>
        g2.subgraph(vpred = (vid, _) => vid >= i * 10000 && vid < (i + 1) * 10000)
          .triangleCount()
          .vertices
          .map(_._2)
          .fold(0)(_ + _)
      }
      println(bandTotals)
    } finally {
      // Always release Spark resources, even if the job fails.
      sc.stop()
    }
  }
}
