package com.test

import org.apache.kudu.client.{CreateTableOptions, DeleteTableResponse}
import org.apache.kudu.spark.kudu.KuduContext
import org.apache.spark.sql.{DataFrame, SparkSession}
import org.apache.spark.sql.types.{IntegerType, StringType, StructField, StructType}
import org.apache.spark.{SparkConf, SparkContext}
import org.junit.Test

import scala.collection.JavaConverters._
/**
 * JUnit tests exercising the Kudu Spark API against a three-node Kudu
 * cluster: table creation, row insertion, SQL querying via a temp view,
 * and table deletion.
 *
 * NOTE(review): class name should be UpperCamelCase (KuduSparkApiDemo);
 * kept as-is so external references (test runners, build config) still work.
 */
class kuduSparkApiDemo {

  /** Kudu master addresses, shared by every test (was duplicated four times). */
  private val kuduMaster = "node01:7051,node02:7051,node03:7051"

  /**
   * Creates the "people" table if it does not already exist:
   * range-partitioned on `id`, single replica.
   */
  @Test
  def createTable(): Unit = {
    val sparkSession: SparkSession = SparkSession.builder()
      .appName("createTable")
      .master("local[2]")
      .getOrCreate()
    try {
      val sc: SparkContext = sparkSession.sparkContext
      //sc.setLogLevel("warn")
      val kuduContext = new KuduContext(kuduMaster, sc)

      // id and age are non-nullable; id is also the range-partition key below.
      val schema = new StructType(Array(
        StructField("id", IntegerType, false),
        StructField("name", StringType, true),
        StructField("age", IntegerType, false)
      ))

      val options = new CreateTableOptions()
      options.setRangePartitionColumns(List("id").asJava)
      options.setNumReplicas(1)

      // Guard against "table already exists" failures on repeated runs.
      if (!kuduContext.tableExists("people")) {
        kuduContext.createTable("people", schema, List("id"), options)
      }
    } finally {
      // BUG FIX: the session was never stopped, leaking the local Spark
      // context (threads, UI port) across test methods in the same JVM.
      sparkSession.stop()
    }
  }

  /** Inserts three sample rows into the "people" table. */
  @Test
  def insertData(): Unit = {
    val sparkSession: SparkSession = SparkSession.builder()
      .master("local[2]").appName("insert").getOrCreate()
    try {
      val kuduContext = new KuduContext(kuduMaster, sparkSession.sparkContext)

      import sparkSession.implicits._
      val peopleDF = Seq(
        (1, "zhangsan", 20),
        (2, "lisi", 18),
        (3, "wangwu", 24))
        .toDF("id", "name", "age")
      kuduContext.insertRows(peopleDF, "people")
    } finally {
      sparkSession.stop()
    }
  }

  /**
   * Registers the Kudu table ODS_20190821 as a Spark temp view and runs a
   * row-count query through Spark SQL, printing the result.
   */
  @Test
  def selectData(): Unit = {
    val spark = SparkSession.builder().appName("select").master("local[2]").getOrCreate()
    try {
      //val kuduContext = new KuduContext(kuduMaster, spark.sparkContext)

      // Brings the `.kudu` DataFrameReader extension into scope.
      import org.apache.kudu.spark.kudu._
      spark.read
        .options(Map("kudu.master" -> kuduMaster, "kudu.table" -> "ODS_20190821"))
        .kudu
        .createTempView("ODS_20190821")
      val dataFrame: DataFrame = spark.sql("select count(1) as num from ODS_20190821")

      dataFrame.show()
    } finally {
      spark.stop()
    }
  }

  /** Drops the ODS_20190821 table and prints the server's delete response. */
  @Test
  def dropTable(): Unit = {
    val spark: SparkSession = SparkSession.builder().appName("drop").master("local[4]").getOrCreate()
    try {
      val kuduContext = new KuduContext(kuduMaster, spark.sparkContext)

      val response: DeleteTableResponse = kuduContext.deleteTable("ODS_20190821")
      println(response)
    } finally {
      spark.stop()
    }
  }

}
