package fun.luami.consume

import java.util.regex.Pattern

import ConsumeConstant.ConsumeConstant
import bean.model.B2BModel
import com.alibaba.fastjson.JSON
import fun.lumia.common.SparkTool
import org.apache.spark.sql.SaveMode
import utils.timeUtils.{formatEndLong, formatStartLong, tranTimeb2bfh, tranTimeb2bhy}

import scala.collection.mutable.ListBuffer

object consumeFromPath extends SparkTool {

  /** Width of one aggregation window: 5 minutes, in milliseconds. */
  private val WindowGapMillis: Long = 5 * 60 * 1000L

  /** Regex for a numeric filesize field, e.g. "123" or "123.45". */
  private val FilesizeRegex = "\\d+\\.*\\d*"

  /** Fonsview timestamp layout, e.g. "2020/01/02 03:04:05.678". */
  private val FonsviewTimeRegex = "\\d{4}/\\d{2}/\\d{2} \\d{2}:\\d{2}:\\d{2}\\.\\d{3}"

  /** HangYan timestamp layout, e.g. "20200102T030405.678Z" (digits + letters). */
  private val HangYanTimeRegex = "\\d{8}[a-zA-Z]+\\d{6}\\.\\d{3}[a-zA-Z]+"

  /**
   * Spark job entry point: reads the Fonsview and HangYan B2B logs from HDFS,
   * validates and parses each JSON line, splits every request across the
   * 5-minute windows it spans (apportioning filesize by time), unions the two
   * vendors' records and overwrites the output parquet dataset.
   */
  override def run(args: Array[String]): Unit = {
    val fonsviewSourceRDD = sc.textFile(ConsumeConstant.FONSVIEW_DATA_HDFS_PATH)
    val hangYanSourceRDD = sc.textFile(ConsumeConstant.HANGYAN_DATA_HDFS_PATH)

    val sqlContext = sql
    import sqlContext.implicits._

    // Fonsview records carry 46 pipe-separated fields and slash-formatted timestamps.
    val fonsviewRDD = fonsviewSourceRDD
      .filter(line => isValidRecord(line, 46, FonsviewTimeRegex, tranTimeb2bfh))
      .flatMap(line => parseRecord(line, ConsumeConstant.FONSVIEW_VENDOR_NAME, tranTimeb2bfh))
    // Debug peek at the parsed fonsview output (kept from the original job).
    fonsviewRDD.take(10).foreach(println)

    // HangYan records carry 64 pipe-separated fields and compact timestamps.
    val hangYanRDD = hangYanSourceRDD
      .filter(line => isValidRecord(line, 64, HangYanTimeRegex, tranTimeb2bhy))
      .flatMap(line => parseRecord(line, ConsumeConstant.HANGYAN_VENDOR_NAME, tranTimeb2bhy))

    val unionDF = hangYanRDD.union(fonsviewRDD).toDF()
    unionDF.write.mode(SaveMode.Overwrite).parquet(ConsumeConstant.OUTPUT_HDFS_PATH)
  }

  /**
   * Validates one raw JSON log line.
   *
   * The field-count check runs FIRST and short-circuits everything else:
   * the previous implementation indexed fields 14-16 before verifying the
   * length, so a short record threw ArrayIndexOutOfBoundsException and
   * failed the whole job instead of being filtered out.
   *
   * @param line           raw JSON line; its "message" field is pipe-delimited
   * @param expectedFields exact number of pipe-separated fields required
   * @param timeRegex      regex both request timestamps must match
   * @param parseTime      vendor-specific timestamp-to-epoch-millis parser
   * @return true iff the record is well-formed and end time >= start time
   */
  private def isValidRecord(line: String,
                            expectedFields: Int,
                            timeRegex: String,
                            parseTime: String => Long): Boolean = {
    val messageSplits = JSON.parseObject(line).getString("message").split("\\|")
    messageSplits.length == expectedFields && {
      val filesize = messageSplits(14).trim()
      val reqstarttime = messageSplits(15).trim()
      val reqendtime = messageSplits(16).trim()
      Pattern.matches(FilesizeRegex, filesize) &&
        Pattern.matches(timeRegex, reqstarttime) &&
        Pattern.matches(timeRegex, reqendtime) &&
        parseTime(reqendtime) >= parseTime(reqstarttime)
    }
  }

  /**
   * Parses one validated JSON log line into per-window [[B2BModel]] records.
   *
   * All extracted fields are trimmed; the old fonsview branch skipped
   * trimming while the hangyan branch trimmed, even though both filters
   * validated the trimmed values — behavior is now consistent.
   *
   * @param line      raw JSON line (must already have passed [[isValidRecord]])
   * @param vendor    vendor name stamped on every emitted record
   * @param parseTime vendor-specific timestamp-to-epoch-millis parser
   * @return one record per 5-minute window the request overlaps
   */
  private def parseRecord(line: String,
                          vendor: String,
                          parseTime: String => Long): ListBuffer[B2BModel] = {
    val messageSplits = JSON.parseObject(line).getString("message").split("\\|")
    val server_ip = messageSplits(1).trim()
    val reqdomain = messageSplits(6).trim()
    val filesize = messageSplits(14).trim().toDouble
    val reqstarttime = parseTime(messageSplits(15).trim())
    val reqendtime = parseTime(messageSplits(16).trim())
    val sendtime = reqendtime - reqstarttime
    splitLineData(server_ip, reqdomain, vendor, filesize, reqstarttime, reqendtime, sendtime)
  }

  /**
   * Initializes the Spark configuration (master is supplied externally;
   * enable `conf.setMaster("local[4]")` for local runs).
   */
  override def init(): Unit = {
    //    conf.setMaster("local[4]")
    conf.setAppName("consume")
  }

  /**
   * Apportions `filesize` to a window by the fraction of the total send
   * time spent inside it. Callers must guarantee `sendtime != 0`.
   *
   * @param consistTime milliseconds of the request that fall in the window
   * @param filesize    total bytes transferred by the request
   * @param sendtime    total request duration in milliseconds (non-zero)
   * @return the share of `filesize` attributed to the window
   */
  def calcuNewFilesize(consistTime: Long, filesize: Double, sendtime: Long): Double = {
    consistTime * filesize / sendtime
  }

  /**
   * Splits one request into per-5-minute-window [[B2BModel]] records.
   *
   * For a request spanning N window boundaries, each fully/partially covered
   * window receives a filesize share proportional to the time spent in it;
   * the final (or only) window gets the remainder. A zero `sendtime`
   * (instantaneous request) attributes the whole filesize to the first window.
   *
   * NOTE(review): when the span crosses exactly one boundary (count == 1)
   * the whole request is still booked to the starting window — preserved
   * from the original logic; confirm this is intended.
   *
   * @param server_ip    serving node IP
   * @param reqdomain    requested domain
   * @param vendor       vendor name for the emitted records
   * @param filesize     total bytes transferred
   * @param reqstarttime request start, epoch millis
   * @param reqendtime   request end, epoch millis (>= reqstarttime)
   * @param sendtime     reqendtime - reqstarttime
   * @return one record per covered window, each with count 1
   */
  def splitLineData(server_ip: String,
                    reqdomain: String,
                    vendor: String,
                    filesize: Double,
                    reqstarttime: Long,
                    reqendtime: Long,
                    sendtime: Long): ListBuffer[B2BModel] = {
    // Window-aligned boundaries of the request's first and last windows.
    val windowStart = formatStartLong(reqstarttime)
    val windowEnd = formatEndLong(reqendtime)
    // Number of window boundaries crossed by the request.
    val windowCount = (windowEnd - windowStart) / WindowGapMillis

    val records = new ListBuffer[B2BModel]
    var windowTimestamp = windowStart
    // Filesize share for the final record; whole filesize when sendtime == 0.
    var lastFilesize: Double = filesize

    if (sendtime != 0) {
      var consistTime = 0L
      if (windowCount > 1) {
        // Emit one record per fully-entered window before the last one.
        var i = 1
        while (i < windowCount) {
          // First window only covers from reqstarttime to its upper edge;
          // every subsequent full window covers the whole gap.
          val spanInWindow =
            if (i == 1) windowTimestamp + WindowGapMillis - reqstarttime
            else WindowGapMillis
          records.append(B2BModel(
            windowTimestamp,
            server_ip,
            reqdomain,
            vendor,
            calcuNewFilesize(spanInWindow, filesize, sendtime),
            1
          ))
          windowTimestamp += WindowGapMillis
          i += 1
        }
        // Remaining time of the request inside the final window.
        consistTime = reqendtime - windowTimestamp
      } else {
        // Zero or one boundary crossed: book the whole duration to one window.
        consistTime = reqendtime - reqstarttime
      }
      lastFilesize = calcuNewFilesize(consistTime, filesize, sendtime)
    }

    records.append(B2BModel(
      windowTimestamp,
      server_ip,
      reqdomain,
      vendor,
      lastFilesize,
      1
    ))
    records
  }
}
