package com.qing.spark

import java.io.{File, PrintWriter}
import java.util
import java.util.UUID

import com.qing.spark.beans.Message
import com.qing.spark.dao._
import org.apache.kafka.common.serialization.StringDeserializer
import org.apache.log4j.{Level, Logger}
import org.apache.spark.streaming.kafka010.KafkaUtils
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.streaming.{Minutes, Seconds, StreamingContext}
import org.codehaus.jettison.json.JSONObject
import org.apache.spark.streaming.kafka010.LocationStrategies.PreferConsistent
import org.apache.spark.streaming.kafka010.ConsumerStrategies.Subscribe

import scala.collection.mutable.ListBuffer
import scala.collection.JavaConversions._

/**
  * Created by wuliao on 2018/3/9.
  */
/**
  * Spark Streaming job that consumes sFlow log lines from Kafka topic "sflow",
  * parses each JSON record into a [[Message]], and persists per-hour traffic
  * aggregates (packet-size / datagram-size sums) through [[HourCountDao]].
  *
  * Created by wuliao on 2018/3/9.
  */
object FlumeStreamingLogger {

  import scala.util.control.NonFatal

  //  Logger.getLogger("org").setLevel(Level.ERROR)

  /**
    * Entry point: wires the Kafka direct stream, the JSON parsing stage and
    * the hourly aggregation, then blocks until the streaming context stops.
    *
    * NOTE(review): broker address, Spark master URL and application jar path
    * are hard-coded; consider moving them to `args` or external configuration.
    */
  def main(args: Array[String]): Unit = {

    val conf = new SparkConf()
      .setAppName("LoggerStreaming")
      .setJars(Array("file:///mnt/disk/jar/streaming-logger-0.0.1-jar-with-dependencies.jar"))
      //      .setMaster("local[6,100000]")
      .setMaster("spark://175.102.18.112:7077")

    // 30-minute micro-batches: each batch aggregates half an hour of traffic.
    val ssc = new StreamingContext(conf, Minutes(30))

    // NOTE(review): enable.auto.commit is false but offsets are never
    // committed manually, and auto.offset.reset is "latest" — records that
    // arrive while the job is down are skipped on restart. Commit offsets
    // (e.g. via commitAsync) if at-least-once delivery is required.
    val kafkaParams = Map[String, Object](
      "bootstrap.servers" -> "impala01:9092",
      "key.deserializer" -> classOf[StringDeserializer],
      "value.deserializer" -> classOf[StringDeserializer],
      "group.id" -> "sflow",
      "auto.offset.reset" -> "latest",
      "enable.auto.commit" -> (false: java.lang.Boolean)
    )

    val topics = Array("sflow")
    val stream = KafkaUtils.createDirectStream[String, String](
      ssc,
      PreferConsistent,
      Subscribe[String, String](topics, kafkaParams)
    )
    println("begin...")

    stream.map(_.value)
      // Option-based parsing replaces the previous `null` + filter(_ != null)
      // pattern; malformed records are dropped.
      .flatMap(line => parseMessage(line).toSeq)
      .foreachRDD { rdd =>
        // count() launches a full Spark job — run it once, not twice.
        val recordCount = rdd.count()
        println(recordCount)
        if (recordCount > 0) {
          // hour_count: sum packet/datagram sizes per hour and persist.
          rdd.map(msg => (msg.getHour, (msg.getPacketSize, msg.getDatagramSize)))
            .reduceByKey(addSize)
            .map { case (hour, (packets, datagrams)) => HourCount(hour, packets, datagrams) }
            .foreachPartition { partition =>
              // Materialize the partition into the batch shape the DAO expects.
              val batch = new ListBuffer[HourCount]
              batch ++= partition
              HourCountDao.save(batch)
            }
        }
        // Dead commented-out pipelines (message archive, day_count,
        // ip_day_count, ip_hour_count, raw HBase writer) were removed —
        // recover them from version control if needed again.
      }

    ssc.start()
    ssc.awaitTermination()
  }

  /**
    * Parse one raw Kafka record value (JSON text) into a [[Message]].
    *
    * @param raw JSON line carrying "@timestamp", "message" and "host" fields
    * @return Some(parsed message) on success, None when the record is
    *         malformed (or `parse()` yields null)
    */
  private def parseMessage(raw: String): Option[Message] =
    try {
      val json = new JSONObject(raw)
      // Option(...) also maps a null result of parse() to None.
      Option(new Message(json.optString("@timestamp"),
        json.optString("message"), json.optString("host")).parse())
    } catch {
      // NonFatal keeps fatal errors (OOM, interrupts) propagating.
      case NonFatal(_) => None
    }

  /**
    * Combine two (packetSize, datagramSize) accumulators element-wise.
    * Used as the reduce function for per-key traffic aggregation.
    */
  def addSize(pre: Tuple2[Int, Int], after: Tuple2[Int, Int]): Tuple2[Int, Int] =
    (pre._1 + after._1, pre._2 + after._2)
}
