package org.example
import org.apache.spark.graphx.{Edge,Graph,VertexId}
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.SparkSession

/**
 * GraphX demo: builds a small social graph, prints each vertex's degree,
 * and exports the vertices and edges as CSV files under `output/`.
 */
object SparkGraphX_data2 {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession
      .builder()
      .master("local[*]")
      .appName("spark")
      .getOrCreate()
    val sc = spark.sparkContext

    // Vertices: (id, user name).
    val users: RDD[(VertexId, String)] = sc.parallelize(Seq(
      (1L, "张三"),
      (2L, "李四"),
      (3L, "王五"),
      (4L, "小六"),
      (5L, "老七")
    ))
    // Directed edges labelled with the relationship type.
    val relationships: RDD[Edge[String]] = sc.parallelize(Seq(
      Edge(1L, 2L, "friend"),
      Edge(1L, 3L, "colleague"),
      Edge(2L, 3L, "friend"),
      Edge(3L, 4L, "client"),
      Edge(4L, 5L, "boss"),
      Edge(5L, 3L, "boss"),
      Edge(5L, 3L, "employer")
    ))
    val socialGraph = Graph(users, relationships)

    // Degree = in-degree + out-degree per vertex (parallel edges counted).
    val degrees = socialGraph.degrees.collect().mkString(",")
    println(s"节点度数:$degrees")

    import spark.implicits._
    // Vertices are already (VertexId, String) pairs — no identity map needed.
    val verticesDF = socialGraph.vertices.toDF("id", "name")
    val edgesDF = socialGraph.edges.map(e => (e.srcId, e.dstId, e.attr))
      .toDF("src", "dst", "relationship")
    // Overwrite so re-running the job does not fail on an existing output dir.
    verticesDF.write.mode("overwrite").option("header", "true").csv("output/vertices")
    edgesDF.write.mode("overwrite").option("header", "true").csv("output/edges")

    // Stop the session (also shuts down the underlying SparkContext).
    spark.stop()
  }
}
