package com.qing.spark

import java.text.SimpleDateFormat

import com.qing.spark.dao.{CPUDao, MemoryDao}
import org.apache.kafka.common.serialization.StringDeserializer
import org.apache.spark.SparkConf
import org.apache.spark.streaming.kafka010.ConsumerStrategies.Subscribe
import org.apache.spark.streaming.kafka010.KafkaUtils
import org.apache.spark.streaming.kafka010.LocationStrategies.PreferConsistent
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.codehaus.jettison.json.JSONObject

import scala.collection.mutable.ListBuffer

/**
  * Spark Streaming job: consumes JSON metric events from Kafka, keeps only
  * cpu_util / memory_util samples, computes per-host daily utilisation
  * averages, and persists them through the CPU/Memory DAOs.
  * (Originally created 2018/4/11.)
  */
object UsedStreaming {

  /**
    * Entry point: subscribes to the "shengang" Kafka topic, filters for
    * memory_util / cpu_util counters, aggregates per (counter, host, day)
    * in 30-second micro-batches, and saves the daily averages via the DAOs.
    */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
      .setAppName("UsedStreaming")
      .setJars(Array("file:///mnt/disk/jar/mongodb/streaming-logger-used-0.0.1-jar-with-dependencies.jar"))
      .setMaster("spark://175.102.18.112:7077")
    //          .setMaster("local")

    // 30-second micro-batch interval.
    val ssc = new StreamingContext(conf, Seconds(30))

    val kafkaParams = Map[String, Object](
      "bootstrap.servers" -> "impala01:9092",
      "key.deserializer" -> classOf[StringDeserializer],
      "value.deserializer" -> classOf[StringDeserializer],
      "group.id" -> "shengang",
      "auto.offset.reset" -> "latest",
      // NOTE(review): auto-commit is off but offsets are never committed
      // manually either, so a restart resumes from "latest" (samples produced
      // while the job was down are skipped) — confirm this is intended.
      "enable.auto.commit" -> (false: java.lang.Boolean)
    )

    val topics = Array("shengang")
    val stream = KafkaUtils.createDirectStream[String, String](
      ssc,
      PreferConsistent,
      Subscribe[String, String](topics, kafkaParams)
    )

    stream
      .map(record => new JSONObject(record.value))
      .filter { json =>
        val counterName = json.getString("counter_name")
        counterName == "memory_util" || counterName == "cpu_util"
      }
      .map { json =>
        // Timestamps look like "2018-04-11T07:30:00.000Z" (UTC). The 'Z' is a
        // quoted literal in the pattern, so the parser must be pinned to UTC —
        // otherwise the epoch (and hence the day bucket) is shifted by the
        // executor JVM's local offset, splitting one UTC day across two buckets.
        // SimpleDateFormat is not thread-safe, hence one instance per record.
        val sdf = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'")
        sdf.setTimeZone(java.util.TimeZone.getTimeZone("UTC"))
        val epochMillis = sdf.parse(json.getString("@timestamp")).getTime
        // Whole days since the Unix epoch — the daily aggregation bucket.
        val day = (epochMillis / 1000L / 60L / 60L / 24L).toInt

        // counter_volume is usually a double but sometimes arrives as an
        // integer; fall back instead of dropping the sample.
        val counterVolume: java.lang.Double =
          try json.getDouble("counter_volume")
          catch { case _: Exception => json.getInt("counter_volume").toDouble }

        val host = json.getString("host")
        // Key: "<counter_name>-<host>-<day>". Counter names use underscores,
        // so splitting the key on '-' below always yields the counter name as
        // the first token. NOTE(review): a '-' inside `host` is tolerated only
        // because nothing after token 0 is read back out of the key.
        (json.getString("counter_name") + "-" + host + "-" + day,
          (counterVolume, host, day, 1))
      }
      .foreachRDD { rdd =>
        rdd.reduceByKey(func)
          .map { case (key, (sum, host, day, count)) =>
            val counterName = key.split("-").apply(0)
            // Value: (host-day identifier, average utilisation, host, day).
            (counterName, (host + "-" + day, sum / count, host, day))
          }
          .groupByKey()
          .foreachPartition { it =>
            it.foreach { case (counterName, rows) =>
              // Materialise the rows once; the upstream filter guarantees
              // counterName is one of the two handled values.
              val list = new ListBuffer[Tuple4[java.lang.String, Double, java.lang.String, Int]]
              rows.foreach(r => list.append((r._1, r._2, r._3, r._4)))
              if (counterName == "memory_util") MemoryDao.save(list)
              else if (counterName == "cpu_util") CPUDao.save(list)
            }
          }
      }

    ssc.start()
    ssc.awaitTermination()
  }

  /**
    * Combiner for reduceByKey: sums the volumes (_1) and sample counts (_4)
    * while keeping the host (_2) and day (_3), which are identical for every
    * value sharing a key.
    */
  def func(pre: Tuple4[java.lang.Double, java.lang.String, Int, Int],
           after: Tuple4[java.lang.Double, java.lang.String, Int, Int]):
  Tuple4[java.lang.Double, java.lang.String, Int, Int] = {
    (pre._1 + after._1, pre._2, pre._3, pre._4 + after._4)
  }

}
