package com.jianghang.class_three

import java.util.Locale

import org.apache.commons.lang3.time.FastDateFormat

import org.apache.spark.sql.SparkSession

import scala.collection.mutable.ListBuffer

/**
  * Format of a parsed log line:
  *   ip country province city time method url protocol http_status bytes_sent referer
  *   user_agent browser_name browser_version engine_name engine_version os_name platform_name is_mobile
  */
object LogFormat {

  /**
    * Entry point: reads raw access-log data, derives a reformatted time column
    * via a UDF, and (eventually, see commented-out scaffolding) writes the
    * cleansed rows to HBase/HDFS.
    *
    * @param args optional paths; args(0) = raw log input path, args(1) = json
    *             input path. When absent, the original empty-string
    *             placeholders are used, so existing invocations are unaffected.
    */
  def main(args: Array[String]): Unit = {
    val session = SparkSession
      .builder()
      .master("local[2]")
      .appName("LogFormat")
      .getOrCreate()

    try {
      // Paths were hard-coded empty strings; allow overriding from the command
      // line without changing the entry point's signature.
      val rawLogPath = if (args.length > 0) args(0) else ""
      val jsonPath   = if (args.length > 1) args(1) else ""

      // Read the raw log data.
      val rdd_log = session.sparkContext.textFile(rawLogPath)

      // TODO: real line-parsing logic; identity map kept as a placeholder
      // (transformation is lazy and has no effect until an action runs).
      rdd_log.map(x => x)

      val df = session.read.format("json").option("path", jsonPath).load()

      // UDF: "[30/Jan/2019:00:00:21 +0800]" -> "yyyyMMddHHmm" string,
      // formatted in the JVM default timezone.
      import org.apache.spark.sql.functions._

      def formatTime() = udf(
        (time: String) => {
          // Strip the surrounding brackets: [30/Jan/2019:00:00:21 +0800]
          val beginIndex = time.indexOf("[") + 1
          // BUG FIX: was lastIndexOf("["), which yields endIndex < beginIndex
          // and made substring throw StringIndexOutOfBoundsException on every
          // row. The closing bracket is the correct delimiter.
          val endIndex = time.lastIndexOf("]")
          val time_str = time.substring(beginIndex, endIndex)
          // FastDateFormat is thread-safe, so getInstance here is fine even
          // though the UDF runs per-row on executors.
          val time_long = FastDateFormat.getInstance("dd/MMM/yyyy:HH:mm:ss Z", Locale.ENGLISH)
            .parse(time_str).getTime
          FastDateFormat.getInstance("yyyyMMddHHmm").format(time_long)
        }
      )

      // Derive the formatted time column; bind the result (withColumn returns
      // a new DataFrame — the original discarded it).
      val dfWithTime = df.withColumn("formattime", formatTime()(df("time")))

      // Write the cleansed data to HDFS/HBase (work in progress).
//    dfWithTime.rdd.mapPartitions(
//      partition => {
//        partition.flatMap(
//          x => {
//            val ip = x.getAs[String]("ip")
//            val columns = scala.collection.mutable.HashMap[String, String]()
//            //columns.put("ip",ip)
//            columns
//
//            // HBase API  Put
//
//            //val rowkey = getRowKey(day, referer+url+ip+ua)  // HBase rowkey
//            val rowkey = ""
//            val rk = Bytes.toBytes(rowkey)
//            val put = new Put(Bytes.toBytes(rowkey)) // Put object destined for HBase
//
//            val list = new ListBuffer[((String, String), KeyValue)]()
//            // every column of the "o" column family for this rowkey
//            for ((k, v) <- columns) {
//              val keyValue = new KeyValue(rk, "o".getBytes, Bytes.toBytes(k), Bytes.toBytes(v))
//              list += (rowkey, k) -> keyValue
//            }
//            list.toList
//
//          }
//        )
//      }
//    ).sortByKey()
//      .map(x => {
//        val bytesWritable = new ImmutableBytesWritable(Bytes.toBytes(x._1._1))
//        (bytesWritable, x._2)
//      })

    } finally {
      // Release the SparkContext and its resources even when the job fails.
      session.stop()
    }
  }

}
