package org.example

import org.apache.spark.graphx.{Edge, Graph, VertexId}
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.SparkSession


/** Builds a small in-memory social graph with GraphX and exports its
  * vertices and edges as headered CSV files under `output/ding` (vertices)
  * and `output/bian` (edges).
  */
object SparkGraphX_data1 {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession
      .builder
      .master("local[*]")
      .appName("spark")
      .getOrCreate()
    val sc = spark.sparkContext

    // Vertices: id -> colour label. Single partition keeps the demo output
    // to one CSV part-file per directory.
    val user: RDD[(VertexId, String)] = sc.parallelize(Seq(
      (1L, "red"),
      (2L, "white"),
      (3L, "blue"),
      (4L, "yellow"),
      (5L, "yellow"),
      (6L, "pink")
    ), 1)

    // Directed edges labelled with the relationship type.
    val relationships: RDD[Edge[String]] = sc.parallelize(Seq(
      Edge(1L, 2L, "friend"),
      Edge(1L, 3L, "friend"),
      Edge(2L, 3L, "relative"),
      Edge(3L, 4L, "student"),
      Edge(5L, 4L, "student"),
      Edge(3L, 5L, "boss"),
      Edge(3L, 6L, "client")
    ), 1)

    val socialGraph = Graph(user, relationships)

    import spark.implicits._
    // vertices is already RDD[(VertexId, String)]; no identity map needed.
    val verticesDF = socialGraph.vertices.toDF("id", "name")
    val edgesDF = socialGraph.edges.map(e => (e.srcId, e.dstId, e.attr))
      .toDF("src", "dst", "relationship")

    // Overwrite so re-runs don't fail with "path already exists".
    verticesDF.write.mode("overwrite").option("header", "true").csv("output/ding")
    edgesDF.write.mode("overwrite").option("header", "true").csv("output/bian")

    // Stopping the session also stops the underlying SparkContext.
    spark.stop()
  }
}
