package com.hrt.kuducode

import org.apache.spark.SparkContext
import org.apache.spark.sql.{DataFrame, SaveMode, SparkSession}

/**
  * Demonstrates operating on Kudu through the Spark SQL DataFrame API:
  * builds a small in-memory dataset, appends it to a Kudu table, then
  * reads the table back and prints it.
  */
object SparkKudu2 {

  // Kudu connection settings, shared by every read/write below.
  private val KuduMasters: String = "cm1:7051,cm2:7051,cm3:7051"
  private val KuduTable: String   = "spark_t1"

  def main(args: Array[String]): Unit = {
    val session: SparkSession = SparkSession.builder().master("local").appName("test").getOrCreate()

    try {
      // Read existing data from Kudu (kept for reference):
      //    val frame: DataFrame = session.read.option("kudu.master", KuduMasters)
      //      .option("kudu.table", KuduTable).format("kudu").load()
      //    frame.show()

      // Build a small local dataset to append to the Kudu table.
      val people = List[PersonInfo](
        PersonInfo(5, "a1", 10, 100),
        PersonInfo(6, "a2", 11, 101)
      )
      import session.implicits._
      val frame: DataFrame = people.toDF()

      // Write the rows to Kudu. SaveMode.Append is required: the Kudu
      // connector rejects Overwrite/ErrorIfExists for existing tables.
      frame.write.mode(SaveMode.Append).option("kudu.master", KuduMasters)
        .option("kudu.table", KuduTable).format("kudu").save()

      // Read the table back and display it to confirm the write succeeded.
      val result: DataFrame = session.read.option("kudu.master", KuduMasters)
        .option("kudu.table", KuduTable).format("kudu").load()
      result.show()
    } finally {
      // Always release the SparkSession (and its underlying SparkContext),
      // even if the Kudu write/read fails — the original leaked it.
      session.stop()
    }
  }

}
