import org.apache.kudu.client.CreateTableOptions
import org.apache.spark.sql.SparkSession
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.kudu.spark.kudu.KuduContext
import org.apache.spark.sql.types.{IntegerType, StringType, StructField, StructType}

/**
  * @title: SparkKuduTest
  * @projectName kuduMaster
  * @description: TODO
  * @author Administrator
  * @date 2019/11/12 15:33
  */
object SparkKuduTest {

  /**
    * Entry point: builds a local SparkSession, connects to the Kudu masters,
    * and ensures the demo table `spark_kudu` exists.
    *
    * Fix vs. original: the SparkSession was never stopped, leaking the local
    * Spark context on every run (and on failure). The session is now released
    * in a `finally` block.
    */
  def main(args: Array[String]): Unit = {
    val sparkSession: SparkSession = SparkSession.builder()
      .appName("SparkKuduTest")
      .master("local[2]")
      .getOrCreate()
    try {
      val sc: SparkContext = sparkSession.sparkContext
      sc.setLogLevel("ERROR")

      // Comma-separated list of Kudu master addresses (host:port).
      val kuduContext: KuduContext = new KuduContext("node01:7051,node02:7051,node03:7051", sc)
      createTable(kuduContext)
    } finally {
      // Always release the Spark context, even if table creation fails.
      sparkSession.stop()
    }
  }

  /**
    * Creates the `spark_kudu` table if it does not already exist (idempotent).
    *
    * Schema: userId (primary key), name, age, sex — all declared non-nullable,
    * as Kudu requires for key columns. Partitioning: range partition on the
    * primary-key column with a single replica (test/dev setting only;
    * production clusters typically use 3 replicas).
    *
    * @param kuduContext connected Kudu context used for the DDL operations
    */
  private def createTable(kuduContext: KuduContext): Unit = {
    // 1.1 Table name.
    val tableName = "spark_kudu"
    // 1.2 Table schema; `false` marks each column as NOT nullable.
    val schema = StructType(
      StructField("userId", StringType, false) ::
        StructField("name", StringType, false) ::
        StructField("age", IntegerType, false) ::
        StructField("sex", StringType, false) :: Nil)
    // 1.3 Primary key column(s); must be non-nullable and lead the schema.
    val primaryKey = Seq("userId")
    // 1.4 Partitioning options for the new table.
    val options = new CreateTableOptions
    import scala.collection.JavaConverters._
    // Range-partition on the primary-key column (Kudu API takes a Java list).
    options.setRangePartitionColumns(List("userId").asJava)
    // Single replica — suitable only for a one-tablet-server test cluster.
    options.setNumReplicas(1)
    // 1.5 Create the table only when it is absent, so reruns are safe.
    if (!kuduContext.tableExists(tableName)) {
      kuduContext.createTable(tableName, schema, primaryKey, options)
    }
  }
}
