import org.apache.commons.math3.geometry.spherical.twod

import org.apache.spark.graphx
import org.apache.spark.graphx.{Edge, Graph, VertexId}
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{SaveMode, SparkSession}

/**
 * Minimal GraphX demo: builds a small social graph (6 colored vertices,
 * 7 labeled edges), then exports vertices and edges as header-ed CSV files
 * under output/ding and output/bian respectively.
 */
object SparkGraphX {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession
      .builder
      .master("local[*]")
      .appName("spark")
      .getOrCreate()
    val sc = spark.sparkContext

    // GraphX construction steps: build vertices, build edges, then build the graph.
    // Vertex attribute is a single String (a color name).
    val users: RDD[(VertexId, String)] = sc.parallelize(Seq(
      (1L, "red"),
      (2L, "white"),
      (3L, "blue"),
      (4L, "green"),
      (5L, "yellow"),
      (6L, "pink")
    ), 1)
    val relationships: RDD[Edge[String]] = sc.parallelize(Seq(
      Edge(1L, 2L, "friend"),
      Edge(1L, 3L, "friend"),
      Edge(2L, 3L, "relative"),
      Edge(3L, 4L, "student"),
      Edge(5L, 4L, "student"),
      Edge(3L, 5L, "boss"),
      Edge(3L, 6L, "client")
    ), 1)
    val socialGraph = Graph(users, relationships)

    // Export graph data as CSV.
    import spark.implicits._
    // vertices is already RDD[(VertexId, String)], so no identity map is needed.
    val verticesDF = socialGraph.vertices
      .toDF("id", "name")
    val edgesDF = socialGraph.edges.map(e => (e.srcId, e.dstId, e.attr))
      .toDF("src", "dst", "relationship")
    // Overwrite so the job can be re-run; without a save mode, an existing
    // output directory makes the write throw AnalysisException.
    verticesDF.write.mode(SaveMode.Overwrite).option("header", "true").csv("output/ding")
    edgesDF.write.mode(SaveMode.Overwrite).option("header", "true").csv("output/bian")

    // Stopping the session also stops the underlying SparkContext.
    spark.stop()
  }

}
