package SparkGraphXInAction

import org.apache.spark._
import org.apache.log4j.{Level, Logger}
import org.apache.spark.SparkContext
import org.apache.spark.SparkContext._
import org.apache.spark.SparkConf
import org.apache.spark.graphx._
import org.apache.spark.graphx.Graph._
import org.apache.spark.rdd.RDD
import org.apache.spark.graphx.util.GraphGenerators

/**
  * Created by Administrator on 2017/5/2 0002.
  */
/**
  * Demonstrates merging two GraphX property graphs that use unrelated
  * vertex-id spaces by unifying vertices that carry the same String attribute.
  */
object TestMergeGraph {

  /**
    * Merges two graphs whose vertex ids are independent, identifying vertices
    * by their String attribute (vertices with equal attributes become one).
    *
    * @param g1 first graph with String vertex and edge attributes
    * @param g2 second graph with String vertex and edge attributes
    * @return a new graph over a fresh, consistent vertex-id space containing
    *         the union of both graphs' vertices (deduplicated by attribute)
    *         and the union of both graphs' edges, re-keyed to the new ids
    */
  def mergeGraphx(g1: Graph[String, String], g2: Graph[String, String]): Graph[String, String] = {
    // Assign a fresh unique id to every distinct vertex attribute across
    // both graphs: RDD[(attr, newId)].
    val v = g1.vertices.map(_._2).union(g2.vertices.map(_._2)).distinct.zipWithIndex

    // Re-express a graph's edges in the new id space: join once on the
    // source attribute to pick up newSrcId, then once on the destination
    // attribute to pick up newDstId.
    def edgesWithNewVertexIds(g: Graph[String, String]): RDD[Edge[String]] =
      g.triplets
        .map(et => (et.srcAttr, (et.attr, et.dstAttr)))
        .join(v)  // (srcAttr, ((attr, dstAttr), newSrcId))
        .map { case (_, ((attr, dstAttr), newSrcId)) => (dstAttr, (newSrcId, attr)) }
        .join(v)  // (dstAttr, ((newSrcId, attr), newDstId))
        .map { case (_, ((newSrcId, attr), newDstId)) => Edge(newSrcId, newDstId, attr) }

    // Graph expects vertices keyed by VertexId, so swap (attr, id) -> (id, attr).
    Graph(v.map(_.swap), edgesWithNewVertexIds(g1).union(edgesWithNewVertexIds(g2)))
  }

  def main(args: Array[String]): Unit = {
    // Suppress noisy framework logging for the demo output.
    Logger.getLogger("org.apache.spark").setLevel(Level.WARN)
    Logger.getLogger("org.eclipse.jetty.server").setLevel(Level.OFF)

    // Local single-threaded runtime environment for this example.
    val conf = new SparkConf().setAppName("SimpleGraphX").setMaster("local")
    val sc = new SparkContext(conf)

    try {
      // Two graphs describing overlapping entities under different id schemes;
      // "Aristotle", "Plato" and "Socrates" appear in both and should merge.
      val philosophers = Graph(
        sc.makeRDD(Seq((1L,"Aristotle"), (2L,"Plato"), (3L,"Socrates"), (4L,"male"))),
        sc.makeRDD(Seq(Edge(2L,1L,"Influences"), Edge(3L,2L,"Influences"), Edge(3L,4L,"hasGender"))))
      val rdfGraph = Graph(sc.makeRDD(Seq((1L,"wordnet_philosophers"), (2L,"Aristotle"),(3L,"Plato"),(4L,"Socrates"))),
        sc.makeRDD(Seq(Edge(2L,1L,"rdf:type"), Edge(3L,1L,"rdf:type"), Edge(4L,1L,"rdf:type"))))

      val combined = mergeGraphx(philosophers, rdfGraph)

      combined.triplets.foreach(t => println(s"${t.srcAttr}----${t.attr}------->${t.dstAttr}"))
    } finally {
      // Fix: the SparkContext was never stopped, leaking the local
      // cluster's resources (threads, UI port) until JVM exit.
      sc.stop()
    }
  }
}
