package cn.edu360.streaming

import java.util.Date

import cn.edu360.streaming.etl.NginxLogEtl
import cn.edu360.streaming.utils._
import org.apache.log4j.Logger
import org.apache.spark.rdd.RDD

/**
  * Nginx log cleansing (ETL) from Kafka to HDFS.
  * wzxjava@126.com
  * Created by wangzhixuan on 2017/05/26 16:27
  */
object NginxLogToHive {
  val logger = Logger.getLogger(NginxLogToHive.getClass)

  /**
    * Entry point. Expects one argument: the application name, which is also
    * used as the base name of the merged per-day output file.
    *
    * Consumes a Kafka topic via a direct stream whose starting offsets are
    * restored from ZooKeeper, ETLs each micro-batch to HDFS, and commits
    * offsets back to ZooKeeper only after the batch has been written
    * (at-least-once semantics).
    */
  def main(args: Array[String]): Unit = {
    if (args.length < 1) {
      // Actionable message instead of a bare exception class name.
      logger.error("Missing required argument <appName>. Usage: NginxLogToHive <appName>")
      System.exit(1)
    }
    val name = args(0)
    val streaming = BaseStreaming(name)
    val message = StreamingUtil.createCustomDirectKafkaStream(streaming.ssc, streaming.kafkaParams, streaming.zkUtils, streaming.zkTopicPath, Set(streaming.topic))
    // Date of the most recently processed batch, used to detect day rollover.
    // Mutable on purpose: it is updated across micro-batches inside foreachRDD.
    var oldDate = DateUtil.getCurrentDate
    message.foreachRDD(mg => {
      if (!mg.isEmpty()) {
        // ETL first, commit offsets second: offsets are only persisted once the
        // batch has been written, so a crash in between replays the batch
        // rather than losing it.
        etl(mg, name)
        ZooKeeperOffsetUtil.saveOffsets(streaming.zkUtils, streaming.zkTopicPath, mg)
      }
    })

    /**
      * ETL one micro-batch: parse each raw Nginx log line into a
      * Hive-consumable string, write the batch to a shared temp directory,
      * merge it into one timestamped per-batch file, and — when the current
      * date differs from the previous batch's date — merge the previous day's
      * per-batch files into a single per-day "final" file (daily partition).
      *
      * @param rdd       (key, rawLogLine) pairs from Kafka; only the value is parsed
      * @param finalName base name for the merged per-day output file
      */
    def etl(rdd: RDD[(String, String)], finalName: String): Unit = {
      // Shared temp directory, reused by every batch; HdfsUtil.merge drains it
      // into the per-batch output file below.
      val tmpFilePath = s"${streaming.target}/tmp/"
      // Direct epoch-millis read; avoids allocating a java.util.Date just for getTime().
      val timeLong = System.currentTimeMillis()
      val date = DateUtil.getCurrentDate
      val newRdd = rdd.map(x => NginxLogEtl.jsonToHiveString(x._2, streaming.grok, streaming.patterns))
      // One file per batch: write the RDD to tmp, then merge into a single
      // timestamped log file under the current date's output directory.
      newRdd.saveAsTextFile(tmpFilePath)
      val outputFileName = s"${streaming.target}/output/${date}/${timeLong}.log"
      HdfsUtil.merge(tmpFilePath, outputFileName)
      // Day rollover: consolidate the completed day's batch files into one
      // per-day file, then advance the tracked date.
      if (!date.equals(oldDate)) {
        val outputPath = s"${streaming.target}/output/${oldDate}/"
        val finalFileName = s"${streaming.target}/final/${oldDate}/${finalName}.log"
        HdfsUtil.merge(outputPath, finalFileName)
        oldDate = date
      }
    }
    streaming.ssc.start()
    streaming.ssc.awaitTermination()
  }
}

