package com.itcj.dmp.utils

import java.util.{Calendar, Date}

import com.typesafe.config.{Config, ConfigFactory}
import org.apache.commons.lang3.time.FastDateFormat
import org.apache.kudu.client.CreateTableOptions
import org.apache.kudu.spark.kudu.KuduContext
import org.apache.kudu.{ColumnSchema, Schema, Type, client}
import org.apache.spark.sql.types._
import org.apache.spark.sql.{DataFrame, Dataset, SaveMode, SparkSession}

/**
 * Helper around [[KuduContext]] for creating, reading and appending to Kudu
 * tables from Spark. Bound either to a [[SparkSession]] (create/read) or to a
 * [[Dataset]] (save). Kudu connection settings come from `kudu.conf` on the
 * classpath (`kudu.master`, `kudu.table.factor`).
 */
class KuduHelper {
  // Mutable because the auxiliary constructors populate them after this().
  var sparkSession: SparkSession = _
  var kuduContext: KuduContext = _
  var dataset: Dataset[_] = _
  var conf: Config = ConfigFactory.load("kudu.conf")

  /** Binds the helper to a SparkSession and opens a KuduContext against the configured master. */
  def this(sparkSession: SparkSession) = {
    this()
    this.sparkSession = sparkSession
    this.kuduContext = new KuduContext(conf.getString("kudu.master"), sparkSession.sparkContext)
  }

  /** Binds the helper to a Dataset, reusing that dataset's own SparkSession. */
  def this(dataset: Dataset[_]) = {
    this(dataset.sparkSession)
    this.dataset = dataset
  }

  /**
   * Create a Kudu table, hash-partitioned on the primary-key columns.
   *
   * @param tablename  name of the table to create
   * @param schema     Spark SQL schema of the table
   * @param keys       primary-key column names (also used as hash-partition columns)
   * @param isdelete   when true (default), an existing table is dropped and recreated;
   *                   when false, creation is skipped if the table already exists
   * @param numBuckets number of hash-partition buckets (was hard-coded to 2)
   */
  def createKuduTable(tablename: String, schema: StructType, keys: List[String],
                      isdelete: Boolean = true, numBuckets: Int = 2): Unit = {
    // Check existence once instead of issuing two tableExists RPCs.
    val exists = kuduContext.tableExists(tablename)
    if (exists && !isdelete) {
      println("表已经存在创建失败") // table already exists and isdelete=false: abort creation
    } else {
      if (exists) {
        kuduContext.deleteTable(tablename)
      }

      import scala.collection.JavaConverters._
      // Replication factor comes from kudu.conf; bucket count is now a parameter.
      val options: CreateTableOptions = new CreateTableOptions()
        .setNumReplicas(conf.getInt("kudu.table.factor"))
        .addHashPartitions(keys.asJava, numBuckets)

      kuduContext.createTable(tablename, schema, keys, options)
    }
  }

  /**
   * Read a Kudu table into a DataFrame.
   *
   * @param tablename table to read
   * @return Some(dataframe) when the table exists, None otherwise
   */
  def readKuduTable(tablename: String): Option[DataFrame] = {
    import org.apache.kudu.spark.kudu._

    if (kuduContext.tableExists(tablename)) {
      val frame: DataFrame = sparkSession.read
        .option("kudu.master", conf.getString("kudu.master"))
        .option("kudu.table", tablename)
        .kudu
      Some(frame)
    } else {
      None
    }
  }

  /**
   * Append the bound dataset to an existing Kudu table.
   * When the table does not exist, logs a warning instead of failing silently
   * (the original swallowed the miss with no feedback at all).
   *
   * @param tablename target table; must already exist
   */
  def saveKuduTable(tablename: String): Unit = {
    import org.apache.kudu.spark.kudu._

    if (kuduContext.tableExists(tablename)) {
      dataset.write
        .option("kudu.master", conf.getString("kudu.master"))
        .option("kudu.table", tablename)
        .mode(SaveMode.Append)
        .kudu
    } else {
      println(s"saveKuduTable: table $tablename does not exist, nothing was saved")
    }
  }
}


/**
 * Companion object: implicit conversions that let a SparkSession or Dataset be
 * used directly as a [[KuduHelper]], date helpers for `yyyyMMdd` day keys, and
 * a smoke-test `main`.
 */
object KuduHelper {

  // Single source of truth for the day-key format (was duplicated in today/yesterday).
  private val DatePattern = "yyyyMMdd"

  /** Lets a SparkSession call KuduHelper methods directly (e.g. spark.createKuduTable(...)). */
  implicit def spark2Helper(spark: SparkSession): KuduHelper = new KuduHelper(spark)

  /** Lets a Dataset call KuduHelper methods directly (e.g. df.saveKuduTable(...)). */
  implicit def ds2Helper(dataset: Dataset[_]): KuduHelper = new KuduHelper(dataset)

  /** Today's date as a day key, e.g. "20190720". */
  def today(): String =
    FastDateFormat.getInstance(DatePattern).format(new Date())

  /** Yesterday's date in the same yyyyMMdd format. */
  def yesterday(): String = {
    val calendar = Calendar.getInstance()
    calendar.add(Calendar.DATE, -1)
    // FastDateFormat.format accepts a Calendar directly.
    FastDateFormat.getInstance(DatePattern).format(calendar)
  }

  /** Smoke test: create a table, append two rows, read them back and show them. */
  def main(args: Array[String]): Unit = {
    import com.itcj.dmp.utils.SparkConfigHelper._
    val sparkSession: SparkSession = SparkSession.builder()
      .master("local[2]")
      .appName("KuduHelper")
      .loadConfig()
      .getOrCreate()

    val schema: StructType = StructType(
      StructField("id", IntegerType, true) ::
        StructField("name", StringType, false) ::
        StructField("age", LongType, false) :: Nil)

    val keys = List("id")

    // Resolved through the implicit spark2Helper conversion above.
    sparkSession.createKuduTable("student7", schema, keys)

    import sparkSession.implicits._

    // NOTE(review): Person is a case class declared elsewhere in this project.
    val df: DataFrame = Seq(Person(15, "lisi", 21), Person(20, "kkk", 30)).toDF()
    df.saveKuduTable("student7")

    // foreach on Option replaces the isDefined/get pair.
    sparkSession.readKuduTable("student7").foreach(_.show())
  }
}

