package com.tech.common

import com.tech.config.ApplicationConfig
import org.apache.kudu.client.KuduClient.KuduClientBuilder
import org.apache.kudu.client._
import org.apache.kudu.{ColumnSchema, Schema, Type}
import org.apache.log4j.Logger

import java.util
import scala.collection.JavaConverters._
import scala.collection.mutable
import scala.collection.mutable.ArrayBuffer

object KuduUtil {
  @transient lazy val logger: Logger = Logger.getLogger(getClass)
  // One shared client/session per JVM — this object is a singleton.
  @transient lazy val kuduClient: KuduClient = new KuduClientBuilder(ApplicationConfig.KUDU_MASTER_ADDRESSES).build()
  @transient lazy val session: KuduSession = kuduClient.newSession()
  // Event table.
  @transient lazy val retaileventTable: KuduTable = kuduClient.openTable("retailevent")
  // Customer table.
  @transient lazy val customerTable: KuduTable = kuduClient.openTable("customer")
  // Kafka-offset bookkeeping table; created on first access if it does not exist yet.
  @transient lazy val offsetTable: KuduTable = {
    // Reference DDL of the table created below (only hash partitioning is
    // actually applied; the range-partition clauses were never implemented):
    //   CREATE TABLE kudu.default.offset_test (
    //     topic varchar WITH ( primary_key = true ),
    //     partition integer WITH ( primary_key = true ),
    //     offset bigint,
    //     create_time timestamp WITH ( nullable = true )
    //   )
    //   WITH (
    //     number_of_replicas = 1,
    //     partition_by_hash_buckets = 4,
    //     partition_by_hash_columns = ARRAY['topic','partition']
    //   )
    if (!kuduClient.tableExists(ApplicationConfig.KAFKA_OFFSET_TABLE_NAME)) {
      val columns = new util.ArrayList[ColumnSchema]
      // A compression codec could also be set per column on each builder here.
      columns.add(new ColumnSchema.ColumnSchemaBuilder("topic", Type.STRING).key(true).build)
      columns.add(new ColumnSchema.ColumnSchemaBuilder("partition", Type.INT32).key(true).build)
      columns.add(new ColumnSchema.ColumnSchemaBuilder("offset", Type.INT64).build)
      columns.add(new ColumnSchema.ColumnSchemaBuilder("create_time", Type.UNIXTIME_MICROS).build)
      val schema = new Schema(columns)
      val createTableOptions = new CreateTableOptions
      val hashKeys = new util.ArrayList[String]
      hashKeys.add("topic")
      hashKeys.add("partition")
      val numBuckets = 4
      createTableOptions.addHashPartitions(hashKeys, numBuckets)
      kuduClient.createTable(ApplicationConfig.KAFKA_OFFSET_TABLE_NAME, schema, createTableOptions)
    }
    kuduClient.openTable(ApplicationConfig.KAFKA_OFFSET_TABLE_NAME)
  }
  // Rule table (customer property update rules).
  @transient lazy val ruleTable: KuduTable = kuduClient.openTable("customer_properties")

  /**
   * Resolves a logical table name to its pre-opened [[KuduTable]] handle.
   *
   * @throws IllegalArgumentException for names other than "retailevent"/"customer"
   *                                  (previously this surfaced as a bare MatchError)
   */
  private def resolveTable(tableName: String): KuduTable = tableName match {
    case "retailevent" => retaileventTable
    case "customer"    => customerTable
    case other         => throw new IllegalArgumentException(s"unsupported kudu table: [$other]")
  }

  /**
   * Reads the column schema of a table.
   *
   * @param tableName "retailevent" or "customer"
   * @return mutable map of column name -> Kudu type name (e.g. "string", "int32")
   */
  def getTableSchema(tableName: String): mutable.HashMap[String, String] = {
    val schema = resolveTable(tableName).getSchema
    val map = mutable.HashMap[String, String]()
    schema.getColumns.asScala.foreach(column => map.put(column.getName, column.getType.getName))
    map
  }

  /**
   * Loads all customer-property update rules from the rule table.
   *
   * @return map of "project|event_key" -> (attribute_key -> update_mode)
   */
  def getRule(): mutable.HashMap[String, mutable.HashMap[String, String]] = {
    // Only rows with all three rule columns present are usable.
    val schema = ruleTable.getSchema
    val scanner = kuduClient.newScannerBuilder(ruleTable)
      .addPredicate(KuduPredicate.newIsNotNullPredicate(schema.getColumn("project")))
      .addPredicate(KuduPredicate.newIsNotNullPredicate(schema.getColumn("event_key")))
      .addPredicate(KuduPredicate.newIsNotNullPredicate(schema.getColumn("update_mode")))
      .build()

    val map = mutable.HashMap[String, mutable.HashMap[String, String]]()
    try {
      while (scanner.hasMoreRows) {
        val iterator = scanner.nextRows()
        while (iterator.hasNext) {
          val result = iterator.next()
          val project = result.getString("project")
          val eventKey = result.getString("event_key")
          // The NOT NULL predicates do not exclude empty strings, so filter them here.
          if (project != "" && eventKey != "") {
            map.getOrElseUpdate(project + "|" + eventKey, mutable.HashMap[String, String]())
              .put(result.getString("attribute_key"), result.getString("update_mode"))
          }
        }
      }
    } finally {
      scanner.close() // previously leaked; releases the server-side scanner
    }
    map
  }

  /**
   * Reads the last committed consumer position per topic-partition.
   * Values were stored as (consumed offset + 1) by [[upsertOffset]].
   *
   * @return Map(topic-partition -> offset to resume from)
   */
  def getOffset(): mutable.HashMap[String, Long] = {
    val scanner = kuduClient.newScannerBuilder(offsetTable).build()
    val map = new mutable.HashMap[String, Long]()
    try {
      while (scanner.hasMoreRows) {
        val iterator = scanner.nextRows()
        while (iterator.hasNext) {
          val result = iterator.next()
          map.put(result.getString("topic") + "-" + result.getInt("partition"), result.getLong("offset"))
        }
      }
    } finally {
      scanner.close() // previously leaked; releases the server-side scanner
    }
    map
  }

  /**
   * Persists the consumed offset for a topic-partition.
   * Stores offset + 1 so [[getOffset]] yields the position to resume from.
   * Failures are logged, never thrown.
   */
  def upsertOffset(topic: String, partition: Int, offset: Long): Unit = {
    val upsert = offsetTable.newUpsert()
    val row = upsert.getRow
    row.addString("topic", topic)
    row.addInt("partition", partition)
    row.addLong("offset", offset + 1)
    // create_time column is UNIXTIME_MICROS: epoch millis * 1000 = micros.
    row.addLong("create_time", System.currentTimeMillis() * 1000)

    try {
      val result = session.apply(upsert)
      if (result.hasRowError)
        logger.error(s"failed to upsert kafka offset table, data: [${row}], error: [${result.getRowError.getMessage}]")
      session.flush()
    } catch {
      case e: Exception => logger.error(s"failed to upsert kafka offset table, data: [${row}]", e)
    }
  }

  /**
   * Upserts one row into a Kudu table.
   *
   * @param tableName "retailevent" or "customer"
   * @param map       column name -> value; entries absent from schemaMap are skipped
   * @param schemaMap column name -> Kudu type name (see [[getTableSchema]])
   * @return true on success, false if a type was unhandled or the write failed
   */
  def insertRow(tableName: String, map: mutable.HashMap[String, Any], schemaMap: mutable.HashMap[String, String]): Boolean = {
    val table = resolveTable(tableName)
    val upsert = table.newUpsert()
    val row = upsert.getRow

    // Populate the row; forall short-circuits on the first unhandled column type
    // (replaces the former non-local `return` inside a lambda).
    val allMapped = map.forall { case (field, value) =>
      if (!schemaMap.contains(field)) true // field unknown to the table schema: skip silently
      else schemaMap(field) match {
        case "string" =>
          row.addString(field, value.toString); true
        case "unixtime_micros" =>
          // assumes the incoming value is epoch millis — TODO confirm with producers
          row.addLong(field, value.toString.toLong * 1000); true
        case "double" =>
          row.addDouble(field, value.toString.toDouble); true
        case "int32" =>
          row.addInt(field, value.toString.toInt); true
        case _ =>
          logger.error(s"field type match was abnormal when inserting data into kudu. type:[${field}], value: [${value.toString}]")
          false
      }
    }
    if (!allMapped) return false

    try {
      val result = session.apply(upsert)
      if (result.hasRowError) {
        logger.error(s"failed to insert data into the ${tableName} table, data: [${row}], error: [${result.getRowError.getMessage}]")
        return false
      }
      session.flush()
      true
    } catch {
      case e: Exception =>
        logger.error(s"failed to insert data into the ${tableName} table, data: [${row}]", e)
        false
    }
  }

  /**
   * Upserts a customer row keyed by (project, customer_id).
   *
   * @param project           project identifier (primary-key column)
   * @param customer_id       customer identifier (primary-key column)
   * @param create_time       creation time in epoch millis (stored as micros)
   * @param map               extra columns: field name -> value
   * @param customerSchemaMap field name -> Kudu type name for the customer table
   * @return true on success, false if a type was unhandled/unknown or the write failed
   */
  def upsertCustomer(project: String, customer_id: String, create_time: Long, map: mutable.HashMap[String, Any], customerSchemaMap: mutable.HashMap[String, String]): Boolean = {
    val nodeUpsert = customerTable.newUpsert()
    val row = nodeUpsert.getRow
    row.addString("project", project)
    row.addString("customer_id", customer_id)
    // created_time column is UNIXTIME_MICROS: epoch millis * 1000 = micros.
    row.addLong("created_time", create_time * 1000)

    // Populate the remaining columns; short-circuits on the first failure.
    // A field missing from customerSchemaMap now logs and fails instead of
    // throwing an uncaught NoSuchElementException (consistent with insertRow).
    val allMapped = map.forall { case (field, value) =>
      customerSchemaMap.get(field) match {
        case Some("string") =>
          row.addString(field, value.asInstanceOf[String]); true
        case Some("unixtime_micros") =>
          // assumes the incoming value is epoch millis — TODO confirm with producers
          row.addLong(field, value.toString.toLong * 1000); true
        case Some("double") =>
          row.addDouble(field, value.toString.toDouble); true
        case Some("int32") =>
          // BUGFIX: previously called addDouble on an INT32 column, which the
          // Kudu client rejects at runtime; INT32 columns require addInt.
          row.addInt(field, value.toString.toInt); true
        case _ =>
          logger.error(s"field type match was abnormal when updating data into kudu. type:[${field}], value: [${value.toString}]")
          false
      }
    }
    if (!allMapped) return false

    try {
      val result = session.apply(nodeUpsert)
      if (result.hasRowError) {
        logger.error(s"failed to upsert customer table, data: [${nodeUpsert.getRow}], error: [${result.getRowError.getMessage}]")
        return false
      }
      session.flush()
      true
    } catch {
      case e: Exception =>
        logger.error(s"failed to upsert customer table, data: [${nodeUpsert.getRow}]", e)
        false
    }
  }

  /**
   * Fetches selected columns of one customer.
   *
   * @param project     project identifier
   * @param customer_id customer identifier
   * @param array       column names to project
   * @return map of column name -> value (null-valued columns are omitted;
   *         timestamps are converted from micros back to millis)
   */
  def getCustomerDetail(project: String, customer_id: String, array: ArrayBuffer[String]): mutable.HashMap[String, Any] = {
    val schema = customerTable.getSchema
    val scanner = kuduClient.newScannerBuilder(customerTable)
      .setProjectedColumnNames(array.asJava)
      .addPredicate(KuduPredicate.newComparisonPredicate(schema.getColumn("project"), KuduPredicate.ComparisonOp.EQUAL, project))
      .addPredicate(KuduPredicate.newComparisonPredicate(schema.getColumn("customer_id"), KuduPredicate.ComparisonOp.EQUAL, customer_id))
      .build()

    val map = mutable.HashMap[String, Any]()
    try {
      while (scanner.hasMoreRows) {
        val iterator = scanner.nextRows()
        while (iterator.hasNext) {
          val result = iterator.next()
          array.foreach { column =>
            if (!result.isNull(column)) {
              map.put(column, schema.getColumn(column).getType.getName match {
                case "double"          => result.getDouble(column)
                case "string"          => result.getString(column)
                case "unixtime_micros" => result.getLong(column) / 1000
                case "int32"           => result.getInt(column)
                case _                 => null
              })
            }
          }
        }
      }
    } finally {
      scanner.close() // now closed even if reading throws
    }
    map
  }

  /**
   * Fetches the full row of one customer.
   *
   * @param project     project identifier
   * @param customer_id customer identifier
   * @return the last matching RowResult, or null if no row matches
   */
  def getCustomerDetail(project: String, customer_id: String): RowResult = {
    val schema = customerTable.getSchema
    val scanner = kuduClient.newScannerBuilder(customerTable)
      .addPredicate(KuduPredicate.newComparisonPredicate(schema.getColumn("project"), KuduPredicate.ComparisonOp.EQUAL, project))
      .addPredicate(KuduPredicate.newComparisonPredicate(schema.getColumn("customer_id"), KuduPredicate.ComparisonOp.EQUAL, customer_id))
      .build()

    var result: RowResult = null
    try {
      while (scanner.hasMoreRows) {
        val iterator = scanner.nextRows()
        while (iterator.hasNext) {
          result = iterator.next()
        }
      }
    } finally {
      scanner.close() // now closed even if reading throws
    }
    result
  }
}
