package com.example.routes

import org.apache.spark.sql.{SaveMode, SparkSession}

/**
 * Utilities for moving small JSON/Parquet test datasets between a local HDFS
 * instance and an Alluxio cluster, and for reading them back for inspection.
 *
 * NOTE(review): the HDFS/Alluxio endpoints are hard-coded in `HdfsBase` /
 * `AlluxioBase` below — adjust them for other environments.
 */
object Alluxiorw {

  // Cluster endpoints shared by every method in this object.
  private val HdfsBase    = "hdfs://localhost:9000"
  private val AlluxioBase = "alluxio://192.168.20.208:19998"

  /** Reads a JSON dataset from `src` and rewrites it as JSON at `dst`, overwriting any existing output. */
  private def copyAsJson(session: SparkSession, src: String, dst: String): Unit =
    session.read.json(src).write.mode(SaveMode.Overwrite).json(dst)

  /** Reads a JSON dataset from `src` and rewrites it as Parquet at `dst`, overwriting any existing output. */
  private def jsonToParquet(session: SparkSession, src: String, dst: String): Unit =
    session.read.json(src).write.mode(SaveMode.Overwrite).parquet(dst)

  /** Reads a Parquet dataset from `src` and rewrites it as Parquet at `dst`, overwriting any existing output. */
  private def copyAsParquet(session: SparkSession, src: String, dst: String): Unit =
    session.read.parquet(src).write.mode(SaveMode.Overwrite).parquet(dst)

  /** Copies the vertex/edge JSON test files from HDFS into Alluxio. */
  def jsonHDFS2Alluxio(session: SparkSession): Unit = {
    copyAsJson(session, s"$HdfsBase/tmp/testv.json", s"$AlluxioBase/tmp/testv.json")
    copyAsJson(session, s"$HdfsBase/tmp/teste.json", s"$AlluxioBase/tmp/teste.json")
  }

  /** Converts the grid/edge JSON datasets on HDFS to Parquet (output also on HDFS). */
  def HDFSJson2Parquet(session: SparkSession): Unit = {
    jsonToParquet(session, s"$HdfsBase/tmp/jsongrid/grids", s"$HdfsBase/tmp/jsongrid/grid_parquet")
    jsonToParquet(session, s"$HdfsBase/tmp/jsongrid/edges", s"$HdfsBase/tmp/jsongrid/tmp/edge_parquet")
  }

  /** Copies the Parquet datasets produced by [[HDFSJson2Parquet]] from HDFS into Alluxio. */
  def parquetHDFS2Alluxio(session: SparkSession): Unit = {
    copyAsParquet(session, s"$HdfsBase/tmp/jsongrid/grid_parquet", s"$AlluxioBase/tmp/grid_parquet")
    copyAsParquet(session, s"$HdfsBase/tmp/jsongrid/tmp/edge_parquet", s"$AlluxioBase/tmp/edge_parquet")
  }

  /** Reads the JSON test files back from Alluxio and prints the first 10 rows of each. */
  def readJsonFromAlluxio(session: SparkSession): Unit = {
    session.read.json(s"$AlluxioBase/tmp/testv.json").show(10)
    session.read.json(s"$AlluxioBase/tmp/teste.json").show(10)
  }

  /** Reads the Parquet datasets back from Alluxio and prints the first 10 rows of each. */
  def readParquetFromAlluxio(session: SparkSession): Unit = {
    session.read.parquet(s"$AlluxioBase/tmp/grid_parquet").show(10)
    session.read.parquet(s"$AlluxioBase/tmp/edge_parquet").show(10)
  }

  /**
   * Entry point: builds a local SparkSession, reads the Parquet datasets back
   * from Alluxio, and shuts the session down.
   */
  def main(args: Array[String]): Unit = {
    println("Hello world!")
    val session = SparkSession.builder()
      .appName("RouteMap")
      .master("local[*]")
      // NOTE(review): in local mode the driver JVM is already running at this
      // point, so setting spark.driver.memory here has no effect — pass it via
      // spark-submit or JVM options if the GC-overhead OOM reappears.
      .config("spark.driver.memory", "2g")  // try to fix java.lang.OutOfMemoryError: GC overhead limit exceeded
      .config("spark.testing.memory", "471859200")
      .getOrCreate()
    session.sparkContext.setLogLevel("INFO")

    // Other pipeline steps, enable as needed:
    //jsonHDFS2Alluxio(session)
    //readJsonFromAlluxio(session)
    //HDFSJson2Parquet(session)
    //parquetHDFS2Alluxio(session)
    try readParquetFromAlluxio(session)
    finally session.stop()  // always release the Spark context, even on failure
  }
}
