package com.jianghang.class_three.log_format.APP

import java.util.Locale
import java.util.zip.CRC32

import org.apache.commons.lang3.StringUtils
import org.apache.commons.lang3.time.FastDateFormat
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.hbase.client._
import org.apache.hadoop.hbase.io.ImmutableBytesWritable
import org.apache.hadoop.hbase.mapreduce.TableOutputFormat
import org.apache.hadoop.hbase.util.Bytes
import org.apache.hadoop.hbase.{HColumnDescriptor, HTableDescriptor, TableName}
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions.udf

/**
  * ip country province city time method url protocal http_status bytes_sent referer
  * user_agent browser_name browser_version engine_name engine_version os_name platform_name  is_molbie
  *
  * 优化
  *   关闭wal ,把数据刷入磁盘优化
  *
  */
object _031_LogFormatToSaveHBase{

  /**
    * Entry point: read JSON-formatted nginx access logs, add a normalized
    * time column, and bulk-write every row into a per-day HBase table.
    *
    * WAL is disabled on each Put (SKIP_WAL) for write throughput; the final
    * flushTable call forces the MemStore to disk so data is not lost.
    *
    * @param args optional; args(0) is the day (yyyyMMdd) used as the table
    *             suffix and rowkey prefix. Falls back to the historical
    *             hard-coded literal when absent, so existing invocations
    *             keep the old behavior.
    */
  def main(args: Array[String]): Unit = {
    val session = SparkSession.builder()
      .master("local[4]")
      .appName("_030_Log")
      .getOrCreate()

    // Prefer the CLI argument (original intent, see the previously
    // commented-out `args(0)`); keep the old literal as the default.
    // NOTE(review): "20190230" is not a real calendar date (Feb 30). It is
    // only used as an opaque string here, but confirm the intended default.
    val day = if (args.nonEmpty) args(0) else "20190230"

    var logDF = session.read.json("data/output/LogFormatApp2/*")

    // UDF: extract "[30/Jan/2019:00:00:21 +0800]"-style timestamps and
    // reformat them to "yyyyMMddHHmm". FastDateFormat.getInstance caches
    // instances and is thread-safe, so per-row invocation is cheap.
    // NOTE(review): a null or bracket-less `time` value would throw inside
    // the UDF and fail the task — confirm upstream guarantees the format.
    def formatTime() = udf(
      (time: String) => {
        val beginIndex = time.indexOf("[") + 1
        val endIndex = time.lastIndexOf("]")
        val time_str = time.substring(beginIndex, endIndex)
        val time_long = FastDateFormat.getInstance("dd/MMM/yyyy:HH:mm:ss Z", Locale.ENGLISH)
          .parse(time_str).getTime
        FastDateFormat.getInstance("yyyyMMddHHmm").format(time_long)
      }
    )

    // Add the new column "formattime" (replaces it if it already exists).
    logDF = logDF.withColumn("formattime", formatTime()(logDF("time")))

    logDF.show()

    // Convert each row into (rowkey, Put) pairs for TableOutputFormat.
    val hbaseInfoRDD = logDF.rdd.map(x => {
      val ip = x.getAs[String]("ip")
      val country = x.getAs[String]("country")
      val province = x.getAs[String]("province")
      val city = x.getAs[String]("city")
      val formattime = x.getAs[String]("formattime")
      val method = x.getAs[String]("method")
      val url = x.getAs[String]("url")
      val protocal = x.getAs[String]("protocal")
      val http_status = x.getAs[String]("http_status")
      val bytes_sent = x.getAs[String]("bytes_sent")
      val referer = x.getAs[String]("referer")
      val browser_name = x.getAs[String]("browser_name")
      val browser_version = x.getAs[String]("browser_version")
      val os_name = x.getAs[String]("os_name")
      val user_agent = x.getAs[String]("user_agent") // only feeds the rowkey hash, not a column

      // Columns written into the "log_format" family, one cell per entry.
      val columns = Map(
        "ip" -> ip,
        "country" -> country,
        "province" -> province,
        "city" -> city,
        "formattime" -> formattime,
        "method" -> method,
        "url" -> url,
        "protocal" -> protocal,
        "http_status" -> http_status,
        "bytes_sent" -> bytes_sent,
        "referer" -> referer,
        "browser_name" -> browser_name,
        "browser_version" -> browser_version,
        "os_name" -> os_name
      )

      // Rowkey: "<day>_<crc32(referer+url+ip+user_agent)>".
      val rowkey = getRowKey(day, referer + url + ip + user_agent)
      val put = new Put(Bytes.toBytes(rowkey))

      // Null field values are stored as the placeholder "-".
      for ((k, v) <- columns) {
        put.addColumn(Bytes.toBytes("log_format"), Bytes.toBytes(k),
          Bytes.toBytes(if (v == null) "-" else v))
      }
      put.setDurability(Durability.SKIP_WAL) // disable WAL for throughput; flushed explicitly below

      // FIX: use Bytes.toBytes (UTF-8) instead of rowkey.getBytes, which
      // depends on the platform default charset and could disagree with the
      // key bytes inside the Put above.
      (new ImmutableBytesWritable(Bytes.toBytes(rowkey)), put)
    })

    val conf = new Configuration()
    conf.set("hbase.rootdir", "hdfs://hadoop000:8020/hbase")
    conf.set("hbase.zookeeper.quorum", "hadoop000:2181")

    // (Re)create the per-day table; returns its name.
    val tableName = createTable(day, conf)

    // Tell TableOutputFormat which table to write to.
    conf.set(TableOutputFormat.OUTPUT_TABLE, tableName)

    // Persist the RDD into HBase.
    hbaseInfoRDD.saveAsNewAPIHadoopFile(
      "hdfs://hadoop000:8020/etl/access/hbase",
      classOf[ImmutableBytesWritable],
      classOf[Put],
      classOf[TableOutputFormat[ImmutableBytesWritable]],
      conf
    )

    // Flush MemStore to disk — required because WAL was skipped above.
    flushTable(tableName, conf)
    session.stop()
  }

  /**
    * Force a MemStore flush on the given table so that rows written with
    * WAL disabled (SKIP_WAL) are persisted to StoreFiles on disk.
    *
    * Best-effort: any failure is printed and swallowed so a flush error
    * does not fail the whole job; resources are always released.
    */
  def flushTable(table: String, conf: Configuration): Unit = {
    var conn: Connection = null
    var hbaseAdmin: Admin = null
    try {
      conn = ConnectionFactory.createConnection(conf)
      hbaseAdmin = conn.getAdmin
      hbaseAdmin.flush(TableName.valueOf(table)) // MemStore ==> StoreFile
    } catch {
      case e: Exception => e.printStackTrace()
    } finally {
      if (hbaseAdmin != null) hbaseAdmin.close()
      if (conn != null) conn.close()
    }
  }

  /**
    * Build the HBase rowkey as "&lt;time&gt;_&lt;crc32(info)&gt;".
    *
    * The CRC32 of the payload spreads similar rows across the key space.
    * A null or empty `info` contributes a checksum of 0. The checksum is
    * computed over the UTF-8 bytes of `info`, matching Bytes.toBytes.
    */
  def getRowKey(time: String, info: String) = {
    val crc32 = new CRC32()
    if (info != null && info.nonEmpty) {
      crc32.update(info.getBytes("UTF-8"))
    }
    // Interpolation compiles to StringBuilder appends — no repeated "+".
    s"${time}_${crc32.getValue}"
  }

  /**
    * (Re)create the per-day HBase table "nginx_log_&lt;day&gt;" with a single
    * column family "log_format" and return the table name.
    *
    * The job is a daily offline batch: if a previous run left the table
    * behind, it is disabled and dropped first so the day's data is always
    * rewritten from scratch. Errors are printed and swallowed; the table
    * name is returned regardless, and resources are always released.
    */
  def createTable(day: String, conf: Configuration) = {
    val table = "nginx_log_" + day

    var conn: Connection = null
    var hbaseAdmin: Admin = null
    try {
      conn = ConnectionFactory.createConnection(conf)
      hbaseAdmin = conn.getAdmin

      // Drop any leftover table from a failed or repeated run.
      val tableName = TableName.valueOf(table)
      if (hbaseAdmin.tableExists(tableName)) {
        hbaseAdmin.disableTable(tableName)
        hbaseAdmin.deleteTable(tableName)
      }

      val descriptor = new HTableDescriptor(tableName)
      descriptor.addFamily(new HColumnDescriptor("log_format"))
      hbaseAdmin.createTable(descriptor)
    } catch {
      case e: Exception => e.printStackTrace()
    } finally {
      if (hbaseAdmin != null) hbaseAdmin.close()
      if (conn != null) conn.close()
    }

    table
  }


}
