import org.apache.spark.SparkConf
import org.apache.spark.sql._

/**
  * Demo: writes sample vertices into a Nebula Graph space and reads
  * vertices/edges back through the Nebula Spark connector.
  *
  * @author dinghao
  * @since 2021-08-20
  */
object NebulaSource {

  def main(args: Array[String]): Unit = {
    write()
    read()
    // Fix: the session was never stopped, leaking the driver/session at exit.
    // getOrCreate() returns the single active session, so this closes the one
    // used by both write() and read().
    session.stop()
  }

  /**
    * Builds (or reuses, via getOrCreate) the local SparkSession shared by
    * write() and read(). Extracted to remove the duplicated builder code.
    */
  private def session: SparkSession =
    SparkSession.builder()
      .config(new SparkConf().setAppName("NebulaSource").setMaster("local[*]"))
      .getOrCreate()

  /**
    * Writes two sample vertices ("id", "name") into the Nebula space named by
    * the "nebula.space" setting.
    *
    * NOTE(review): `.write.nebula(...)` comes from a connector implicit that is
    * not imported in this view — presumably a wildcard import elsewhere in the
    * project; verify the connector import is present.
    */
  def write(): Unit = {
    val sampleVertices = session
      .createDataFrame(Seq(("1", "dh"), ("2", "zyh")))
      .toDF("id", "name")

    sampleVertices.write.nebula(
      ConnectMessage.get("nebula.address"),
      ConnectMessage.get("nebula.space"),
      ConnectMessage.get("nebula.partitionNum")
    )
  }

  /**
    * Reads vertices of the configured tag and edges of the configured edge
    * type from the Nebula space, printing both DataFrames to stdout.
    */
  def read(): Unit = {
    val spark = session

    // Vertices: "*" asks the connector for all properties of the tag.
    val vertexDataFrame = spark.read
      .nebula(
        ConnectMessage.get("nebula.address"),
        ConnectMessage.get("nebula.space"),
        ConnectMessage.get("nebula.partitionNum")
      )
      .loadVerticesToDF(ConnectMessage.get("nebula.tag"), "*")
    vertexDataFrame.show()

    // Edges: only the fields listed under "nebula.fields" are loaded.
    val edgeDataFrame = spark.read
      .nebula(
        ConnectMessage.get("nebula.address"),
        ConnectMessage.get("nebula.space"),
        ConnectMessage.get("nebula.partitionNum")
      )
      .loadEdgesToDF(ConnectMessage.get("nebula.edge"), ConnectMessage.get("nebula.fields"))
    edgeDataFrame.show()
  }
}
