package com.spark.kudu

/**
  * Created by fab.yin on 22/06/2017.
  */

import org.apache.kudu.spark.kudu._
import org.apache.spark.sql.{Row, SparkSession}
import com.spark.model.Customer
import com.spark.util.PropUtil
import org.apache.spark.sql.types.{IntegerType, StringType, StructField, StructType}

object KuduUtils {
  /**
    * Demo entry point: writes sample rows into a Kudu table via Spark SQL
    * and reads them back.
    *
    * Flow: build a small in-memory DataFrame, register it and the target
    * Kudu table as temp views, UPSERT the rows with INSERT INTO (Kudu's
    * Spark integration treats INSERT as UPSERT), then show the result.
    */
  def main(args: Array[String]): Unit = {
    // Load the Kudu master address from the classpath properties file.
    val prop = new PropUtil("config.properties")

    // Sample source rows we will push into Kudu.
    val srcTableData = Array(new Customer(27, "vhen"), new Customer(28, "dido"))

    // Kudu table name — single source of truth, reused for the temp-view
    // name, the read options, and the INSERT statement below.
    val kuduTableName = "fab"

    // Init SparkSession (local mode for this demo).
    val sparkSession = SparkSession.builder
      .master("local")
      .appName("spark session example")
      .getOrCreate()

    try {
      // Map each Customer to a Row matching the schema declared below.
      val rdd = sparkSession.sparkContext.parallelize(srcTableData).map(x => Row(x.cid, x.cname))

      val schema = StructType(
        Seq(
          StructField("id", IntegerType, nullable = true),
          StructField("name", StringType, nullable = true)
        )
      )

      val df = sparkSession.createDataFrame(rdd, schema)

      // Register the source data as a Spark SQL view.
      df.createOrReplaceTempView("source_table")

      // kuduOptions carries the list of Kudu masters and the target table.
      val kuduOptions = Map(
        "kudu.master" -> prop.getProp("KUDU_MASTER"),
        "kudu.table" -> kuduTableName
      )
      sparkSession.read.options(kuduOptions).kudu.createOrReplaceTempView(kuduTableName)

      // INSERT is treated as UPSERT by the Kudu-Spark integration.
      sparkSession.sql(s"INSERT INTO TABLE $kuduTableName SELECT * FROM source_table")

      // See results of our insert.
      sparkSession.read.options(kuduOptions).kudu.show()
    } finally {
      // Release local Spark resources even if the job fails.
      sparkSession.stop()
    }
  }
}
