package com.carol.bigdata.kudu

import com.carol.bigdata.utils.{Flag, TimeUtil}
import org.apache.kudu.{ColumnSchema, Common, Type}
import org.apache.kudu.client.{AlterTableOptions, CreateTableOptions, PartialRow}
import org.apache.kudu.spark.kudu.{KuduContext, KuduDataFrameReader, KuduDataFrameWriter}
import org.apache.spark.SparkContext
import org.apache.spark.sql.{DataFrame, SparkSession}
import org.apache.spark.sql.types.{BooleanType, DoubleType, FloatType, IntegerType, StringType, StructField, StructType, TimestampType}
import java.sql.Timestamp

import com.carol.bigdata.Config
import com.carol.bigdata.utils.Flag

import scala.collection.JavaConverters.{asScalaBufferConverter, seqAsJavaListConverter}
import scala.collection.mutable

/**
 * Utility helpers for managing Apache Kudu tables from Spark:
 * create/delete tables, upsert data, read tables into DataFrames,
 * derive a Spark schema from a Kudu table, and alter table structure.
 * Also contains ad-hoc `test_*` drivers invoked (commented) from `main`.
 */
object KuduUtil {
    /**
     * Create a Kudu table if it does not already exist.
     *
     * @param kuduContext   Kudu context bound to a SparkContext
     * @param tableName     name of the table to create
     * @param schema        Spark SQL schema for the table
     * @param keySeq        primary-key column names
     * @param partitionList columns used for hash partitioning
     * @param bucketNum     number of hash buckets (should be >= 2)
     * @param replicaNum    tablet replica count (odd number in production)
     */
    def createTable(kuduContext: KuduContext,
                    tableName: String,
                    schema: StructType,
                    keySeq: Seq[String],
                    partitionList: List[String],
                    bucketNum: Int = 2,
                    replicaNum: Int = 1): Unit = {
        // Only create when the table is absent; creating an existing table would throw.
        if (!kuduContext.tableExists(tableName)) {
            val createTableOptions = new CreateTableOptions()
            // Hash partitioning over the given columns, with the requested replica count.
            createTableOptions.addHashPartitions(partitionList.asJava, bucketNum).setNumReplicas(replicaNum)
            kuduContext.createTable(tableName, schema, keySeq, createTableOptions)
            println(s"==== create Kudu Table: $tableName ====")
        }
    }
    
    /** Drop a Kudu table if it exists; otherwise print a notice. */
    def deleteTable(kuduContext: KuduContext, tableName: String): Unit = {
        if (kuduContext.tableExists(tableName)) {
            kuduContext.deleteTable(tableName)
            println(s"==== delete Kudu Table:$tableName ====")
        } else {
            println(s"==== Kudu Table:$tableName not exist ====")
        }
    }
    
    /**
     * Upsert the DataFrame into the table via the KuduContext:
     * rows whose primary key already exists are updated, others inserted.
     */
    def upsertTable(kuduContext: KuduContext, tableName: String, df: DataFrame): Unit = {
        kuduContext.upsertRows(df, tableName)
    }
    
    /**
     * Upsert the DataFrame through the Spark DataSource API ("append" mode
     * performs upsert semantics for the kudu-spark connector).
     *
     * @param kuduMaster Kudu master address, e.g. "host:7051"
     */
    def upsertTable2(tableName: String, df: DataFrame, kuduMaster: String): Unit = {
        df.write.options(Map(
            "kudu.master" -> kuduMaster,
            "kudu.table" -> tableName,
            "kudu.operation.timeout.ms" -> "1000000"))
          .mode("append").kudu
    }
    
    /**
     * Upsert a single row (keyed by `uid`) setting its `ip` column, using the
     * raw Kudu client. The session is closed in a finally block so pending
     * operations are flushed and client resources are released.
     *
     * @param uid primary-key value to upsert (default kept for backward compatibility)
     * @param ip  ip value to write
     */
    def alterRow(kuduContext: KuduContext,
                 tableName: String,
                 uid: String = "uid_0",
                 ip: String = "192.168.117.19"): Unit = {
        val client = kuduContext.syncClient
        val table = client.openTable(tableName)
        val session = client.newSession()
        try {
            val upsert = table.newUpsert()
            val row: PartialRow = upsert.getRow
            row.addString("uid", uid)
            row.addString("ip", ip)
            session.apply(upsert)
        } finally {
            // close() flushes any buffered operations before releasing the session.
            session.close()
        }
    }
    
    /** Read a Kudu table into a DataFrame via the Spark DataSource API. */
    def readKudu(spark: SparkSession, tableName: String, kuduMaster: String): DataFrame = {
        spark.read.options(Map(
            "kudu.master" -> kuduMaster,
            "kudu.table" -> tableName,
            "kudu.operation.timeout.ms" -> "1000000")).kudu
    }
    
    /**
     * Inspect a Kudu table and derive its Spark schema plus column-name lists
     * grouped by Spark type.
     *
     * @return (schema, integerCols, doubleCols, floatCols, booleanCols, timestampCols, allCols)
     *         — column lists follow the table's column order.
     */
    def getSchema(kuduContext: KuduContext,
                  tableName: String
                 ): (StructType, List[String], List[String], List[String], List[String], List[String], List[String]) = {
        // Dynamically read the table's column metadata.
        val table = kuduContext.syncClient.openTable(tableName)
        val columns: mutable.Seq[ColumnSchema] = table.getSchema.getColumns.asScala
        
        // Map Kudu column types onto Spark SQL types, preserving column order.
        // Types with no mapping (e.g. BINARY, DECIMAL) are skipped with a warning
        // instead of throwing a MatchError as the previous non-exhaustive match did.
        // NOTE: "unixtime_micros" is matched explicitly; it does not contain "int".
        val fieldsList: List[StructField] = columns.toList.flatMap { col =>
            val name = col.getName
            col.getType.getDataType.toString.toLowerCase match {
                case t if t.contains("string")          => Some(StructField(name, StringType))
                case t if t.contains("unixtime_micros") => Some(StructField(name, TimestampType))
                case t if t.contains("int")             => Some(StructField(name, IntegerType))
                case t if t.contains("double")          => Some(StructField(name, DoubleType))
                case t if t.contains("float")           => Some(StructField(name, FloatType))
                case t if t.contains("bool")            => Some(StructField(name, BooleanType))
                case other =>
                    println(s"==== unsupported Kudu type '$other' for column '$name', skipped ====")
                    None
            }
        }
        val schemas: StructType = StructType(fieldsList)
        
        val intStampFields = fieldsList.filter(_.dataType == IntegerType).map(_.name)
        val doubleFields = fieldsList.filter(_.dataType == DoubleType).map(_.name)
        val floatFields = fieldsList.filter(_.dataType == FloatType).map(_.name)
        val boolFields = fieldsList.filter(_.dataType == BooleanType).map(_.name)
        val timeStampFields = fieldsList.filter(_.dataType == TimestampType).map(_.name)
        val columnList = fieldsList.map(_.name)
        (schemas, intStampFields, doubleFields, floatFields, boolFields, timeStampFields, columnList)
    }
    
    /** Apply the given schema alterations (add/drop column, etc.) to the table. */
    def alterTable(kuduContext: KuduContext,
                   tableName: String,
                   alterTableOptions: AlterTableOptions): Unit = {
        val client = kuduContext.syncClient
        client.alterTable(tableName, alterTableOptions)
    }
    
    /** Manual test: create a sample user-profile table. */
    def test_createTable(kuduContext: KuduContext): Unit = {
        // Table schema; key columns must be non-nullable.
        val testSchema: StructType = StructType(
            StructField("game_id", StringType, false) ::
                StructField("uid", StringType, false) ::
                StructField("uid_type", StringType, false) ::
                StructField("ads_type", StringType, true) ::
                StructField("day", StringType, false) ::
                StructField("agent", StringType, true) ::
                StructField("ip", StringType, true) ::
                StructField("timestamp", TimestampType, true) ::
                StructField("time", TimestampType, true) ::
                StructField("timezone", StringType, true) ::
                StructField("year", StringType, true) ::
                StructField("month", StringType, true) ::
                StructField("week", StringType, true) ::
                StructField("hour", StringType, true) ::
                StructField("minute", StringType, true) :: Nil
            )
        val tableName = "user_profile_5"
        val keySeq = Seq("uid", "uid_type")
        val partitionList = List("uid_type")
        createTable(kuduContext, tableName, testSchema, keySeq, partitionList)
    }
    
    /** Manual test: drop a sample table. */
    def test_deleteTable(kuduContext: KuduContext): Unit = {
        val tableName = "user_info2"
        //deleteTable(kuduContext, "user_profile_2")
        //deleteTable(kuduContext, "user_profile_3")
        //deleteTable(kuduContext, "user_profile_4")
        deleteTable(kuduContext, "user_profile_5")
        //deleteTable(kuduContext, "user_profile_6")
        //deleteTable(kuduContext, "user_profile_7")
        //deleteTable(kuduContext, "user_profile_8")
        //deleteTable(kuduContext, "user_profile_9")
        //deleteTable(kuduContext, "user_profile_10")
        //deleteTable(kuduContext, "user_profile_100")
    }
    
    /** Manual test: upsert a tiny in-memory DataFrame through the DataSource API. */
    def test_upsertTable(kuduContext: KuduContext): Unit = {
        val spark: SparkSession = SparkSession.builder()
                                              .config(conf = Config.sparkConf)
                                              .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
                                              .getOrCreate()
        
        import spark.implicits._
        val valueDF: DataFrame = Seq(("张六", "男"), ("李四", "女")).toDF("name", "sex")
        val tableName = "test"
        valueDF.show(false)
        upsertTable2(tableName, valueDF, kuduContext.kuduMaster)
    }
    
    /** Manual test: read back the schema of an existing table. */
    def test_getSchema(kuduContext: KuduContext): Unit = {
        val tableName = "rebecca10"
        getSchema(kuduContext, tableName)
    }
    
    /** Manual test: add columns to an existing table. */
    def test_alterTable(kuduContext: KuduContext): Unit = {
        val tableName = "user_profile_9"
        val alterTableOptions: AlterTableOptions = new AlterTableOptions()
        // Add columns with a default value of "-1".
        alterTableOptions.addColumn("ads_type", kuduContext.kuduType(StringType), "-1")
        alterTableOptions.addColumn("current_level", kuduContext.kuduType(StringType), "-1")
        // Drop column example:
        //alterTableOptions.dropColumn("advertiser")
        alterTable(kuduContext, tableName, alterTableOptions)
    }
    
    /** Entry point: parses CLI flags, builds contexts, and runs the selected manual tests. */
    def main(args: Array[String]): Unit = {
        // Parse command-line flags.
        Flag.Parse(args)
        val sc = new SparkContext(Config.sparkConf)
        sc.setLogLevel(Config.logLevel)
        // NOTE(review): Kudu master address is hard-coded; consider moving into Config/Flag.
        val kuduContext: KuduContext = new KuduContext("slaves01:7051", sc)
    
        val spark: SparkSession = SparkSession.builder()
                                              .config(conf = Config.sparkConf)
                                              .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
                                              .getOrCreate()
        
        // Test table creation
        //test_createTable(kuduContext)
        
        // Test table deletion
        //test_deleteTable(kuduContext)
        
        // Test data upsert
        //test_upsertTable(kuduContext)
        
        // Test reading table schema
        //test_getSchema(kuduContext)
        
        // Test altering table structure
        //test_alterTable(kuduContext)
    
        // Test updating specific columns by primary key
        //alterRow(kuduContext,"test")
        
        // Test reading kudu table data
        //readKudu(spark,"user_profile_5").show(10,false)
    }
}
