package cd.itcast.spark.kudu.createTable

import java.util

import org.apache.kudu.client
import org.apache.kudu.client.{CreateTableOptions, KuduTable}
import org.apache.kudu.spark.kudu.KuduContext
import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.types.{IntegerType, StringType, StructField, StructType}

object SparkKuduTableDemo {

  /**
   * Creates a demo Kudu table with a fixed schema (id, name, age, gender),
   * hash-partitioned on `id` into 3 buckets with a replication factor of 3.
   *
   * Creation is skipped (with a message) if the table already exists, so the
   * demo can be re-run without throwing.
   *
   * @param kuduContext context used to issue DDL against the Kudu cluster
   * @param tablename   name of the Kudu table to create
   */
  def createKuduTable(kuduContext: KuduContext, tablename: String): Unit = {
    // a. Define the table schema.
    val schema: StructType = StructType(Array(
      StructField("id", IntegerType, nullable = true),
      StructField("name", StringType, nullable = true),
      StructField("age", IntegerType, nullable = true),
      StructField("gender", StringType, nullable = true)
    ))
    // b. Primary key column(s) — every Kudu table must declare a primary key.
    val keys: Seq[String] = Seq("id")
    // c. Partitioning strategy and replication factor.
    val options: CreateTableOptions = new CreateTableOptions()
    // Hash-partition on the primary key column into 3 buckets.
    val columns: util.List[String] = new util.ArrayList[String]()
    columns.add("id")
    options.addHashPartitions(columns, 3)
    // Replication factor must be odd; 3 is the usual default.
    options.setNumReplicas(3)
    // Guard against "table already exists" failures when the demo is re-run.
    if (!kuduContext.tableExists(tablename)) {
      val kuduTable: KuduTable = kuduContext.createTable(tablename, schema, keys, options)
      println(kuduTable)
    } else {
      println(s"Table '$tablename' already exists; skipping creation.")
    }
  }

  /**
   * Entry point: builds a local SparkSession, a KuduContext pointing at the
   * configured Kudu masters, creates the demo table, then stops Spark.
   */
  def main(args: Array[String]): Unit = {
    // 1. Build the SparkSession (local mode, app named after this object).
    val sparkConf: SparkConf = new SparkConf()
      .setAppName(this.getClass.getSimpleName.stripSuffix("$"))
      .setMaster("local[2]")
    val spark: SparkSession = SparkSession.builder()
      .config(sparkConf)
      .getOrCreate()
    // 2. Build the KuduContext used for Kudu table DDL and data operations.
    //    NOTE(review): master addresses are hard-coded; consider reading them
    //    from configuration for non-demo use.
    val kuduMaster: String = "node01:7051,node02:7051,node03:7051"
    val kuduContext: KuduContext = new KuduContext(kuduMaster, spark.sparkContext)

    // 3. Create the table.
    createKuduTable(kuduContext, "kudu_itcast")
    spark.stop()
  }

}
