package com.demo.bigdata.hudi

import org.apache.hudi.DataSourceReadOptions._
import org.apache.hudi.DataSourceWriteOptions._
import org.apache.hudi.QuickstartUtils._
import org.apache.hudi.config.HoodieWriteConfig._
import org.apache.spark.sql.SaveMode._
import org.apache.spark.sql.SparkSession

import scala.collection.JavaConversions._

/**
 * End-to-end Apache Hudi demo on a copy-on-write (COW) table:
 * insert, upsert, snapshot/incremental/point-in-time/time-travel queries, and delete.
 * Mirrors the official Hudi Spark quickstart using its bundled `DataGenerator`.
 */
object HudiDemo {
  // Hudi COW table name, also used as the Hive table name via TBL_NAME.
  val tableName = "hudi_trips_cow"
  // HDFS base path where Hudi stores the table's data and timeline metadata.
  val basePath = "hdfs:///user/hive/warehouse/hudi_trips_cow"
  // Quickstart generator shipped with Hudi; produces synthetic trip records
  // keyed by `uuid`, partitioned by `partitionpath`, ordered by `ts`.
  val dataGen = new DataGenerator

  /** Runs the whole demo sequence. Order matters: inserts must precede the
   *  incremental/point-in-time queries, which need at least two commits.
   */
  def main(args: Array[String]): Unit = {
    val spark = initSpark()
    insertData(spark)
    printSchemaAndData(spark)

    updateDate(spark)
    printSchemaAndData(spark)

    incrementalQuery(spark)
    pointInTimeQuery(spark)

    deleteDate(spark)
    printSchemaAndData(spark)
  }

  /** Builds a local SparkSession with Hive support so Hudi can sync/read Hive metadata. */
  def initSpark(): SparkSession = {
    SparkSession
      .builder()
      .config("spark.sql.warehouse.dir", "/user/hive/warehouse")
      .enableHiveSupport()
      .appName("Hudi Demo") // was "Hive Demo" — this is a Hudi demo
      .master("local")
      .getOrCreate()
  }

  /** Bulk-inserts 10 generated trip records, overwriting any existing table data. */
  def insertData(spark: SparkSession): Unit = {
    val inserts = convertToStringList(dataGen.generateInserts(10))
    val df = spark.read.json(spark.sparkContext.parallelize(inserts, 2))
    df.write.format("hudi").
      options(getQuickstartWriteConfigs).
      option(PRECOMBINE_FIELD.key(), "ts").          // dedup ordering field
      option(RECORDKEY_FIELD.key(), "uuid").         // primary key
      option(PARTITIONPATH_FIELD.key(), "partitionpath").
      option(TBL_NAME.key(), tableName).
      mode(Overwrite).
      save(basePath)
  }

  /** Upserts 10 generated updates for previously-inserted keys (Append mode = upsert). */
  def updateDate(spark: SparkSession): Unit = {
    val updates = convertToStringList(dataGen.generateUpdates(10))
    val df = spark.read.json(spark.sparkContext.parallelize(updates, 2))
    df.write.format("hudi").
      options(getQuickstartWriteConfigs).
      option(PRECOMBINE_FIELD.key(), "ts").
      option(RECORDKEY_FIELD.key(), "uuid").
      option(PARTITIONPATH_FIELD.key(), "partitionpath").
      option(TBL_NAME.key(), tableName).
      mode(Append).
      save(basePath)
  }

  /** Hard-deletes two records picked from the current snapshot.
   *  Registers the snapshot view itself so this method works standalone
   *  (previously it silently depended on getCommits having run first).
   */
  def deleteDate(spark: SparkSession): Unit = {
    spark.read.format("hudi").load(basePath).createOrReplaceTempView("hudi_trips_snapshot")
    val ds = spark.sql("select uuid, partitionpath from hudi_trips_snapshot").limit(2)
    val deletes = dataGen.generateDeletes(ds.collectAsList())
    val hardDeleteDf = spark.read.json(spark.sparkContext.parallelize(deletes, 2))
    hardDeleteDf.write.format("hudi").
      options(getQuickstartWriteConfigs).
      option(OPERATION.key(), "delete"). // delete operation instead of default upsert
      option(PRECOMBINE_FIELD.key(), "ts").
      option(RECORDKEY_FIELD.key(), "uuid").
      option(PARTITIONPATH_FIELD.key(), "partitionpath").
      option(TBL_NAME.key(), tableName).
      mode(Append).
      save(basePath)
  }

  /** Prints the current snapshot's schema and a sample of its rows.
   *  The original body generated update records and printed nothing — dead code.
   */
  def printSchemaAndData(spark: SparkSession): Unit = {
    val df = spark.read.format("hudi").load(basePath)
    df.printSchema()
    df.show()
  }

  /** Registers the snapshot view and returns up to 50 distinct commit times, ascending. */
  def getCommits(spark: SparkSession): Array[String] = {
    spark.
      read.format("hudi").
      load(basePath).
      createOrReplaceTempView("hudi_trips_snapshot")
    import spark.implicits._
    spark.sql("select distinct(_hoodie_commit_time) as commitTime from hudi_trips_snapshot order by commitTime").map(k => k.getString(0)).take(50)
  }

  /** Snapshot query: loads the latest table state and shows trips with fare > 20. */
  def loadDate(spark: SparkSession): Unit = {
    val df = spark.
      read.format("hudi").
      load(basePath)
    df.createOrReplaceTempView("hudi_trips_incremental")
    spark.sql("select `_hoodie_commit_time`, fare, begin_lon, begin_lat, ts from  hudi_trips_incremental where fare > 20.0").show()
  }

  /** Time-travel query: reads the table as of the given commit instant.
   *  @param instant commit timestamp to read as of, e.g. getCommits(spark)(0).
   *                 (Previously this parameter was shadowed by a local val and ignored.)
   */
  def timeTravelQuery(spark: SparkSession, instant: String): Unit = {
    val df = spark.read.
      format("hudi").
      option("as.of.instant", instant).
      load(basePath)
    df.createOrReplaceTempView("hudi_trips_incremental")
    spark.sql("select `_hoodie_commit_time`, fare, begin_lon, begin_lat, ts from  hudi_trips_incremental where fare > 20.0").show()
  }

  /** Incremental query: shows records committed after the second-to-last commit. */
  def incrementalQuery(spark: SparkSession): Unit = {
    val commits = getCommits(spark)
    require(commits.length >= 2, "incremental query needs at least two commits")
    val beginTime = commits(commits.length - 2)
    val tripsIncrementalDF = spark.read.format("hudi").
      option(QUERY_TYPE.key(), QUERY_TYPE_INCREMENTAL_OPT_VAL).
      option(BEGIN_INSTANTTIME.key(), beginTime).
      load(basePath)
    tripsIncrementalDF.createOrReplaceTempView("hudi_trips_incremental")
    spark.sql("select `_hoodie_commit_time`, fare, begin_lon, begin_lat, ts from  hudi_trips_incremental where fare > 20.0").show()
  }

  /** Point-in-time query: shows records committed in (beginTime, endTime].
   *  Fixed: `getCommits(spark)(0)` selected a single commit String and then
   *  indexed into it character-by-character; we need the full commit array.
   */
  def pointInTimeQuery(spark: SparkSession): Unit = {
    val commits = getCommits(spark)
    require(commits.length >= 2, "point-in-time query needs at least two commits")
    val beginTime = "000" // Represents all commits > this time.
    val endTime = commits(commits.length - 2) // commit time we are interested in
    val tripsPointInTimeDF = spark.read.format("hudi").
      option(QUERY_TYPE.key(), QUERY_TYPE_INCREMENTAL_OPT_VAL).
      option(BEGIN_INSTANTTIME.key(), beginTime).
      option(END_INSTANTTIME.key(), endTime).
      load(basePath)
    tripsPointInTimeDF.createOrReplaceTempView("hudi_trips_point_in_time")
    spark.sql("select `_hoodie_commit_time`, fare, begin_lon, begin_lat, ts from hudi_trips_point_in_time where fare > 20.0").show()
  }
}
