package com.qing.spark

import com.mongodb.spark.MongoSpark
import org.apache.kafka.common.serialization.StringDeserializer
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.streaming.kafka010.ConsumerStrategies.Subscribe
import org.apache.spark.streaming.kafka010.KafkaUtils
import org.apache.spark.streaming.kafka010.LocationStrategies.PreferConsistent
import org.apache.spark.streaming.{Minutes, Seconds, StreamingContext}
import org.bson.Document
import org.codehaus.jettison.json.JSONObject

/**
  * Created by wuliao on 2018/4/9.
  */
/**
  * Long-running Spark Streaming job:
  * Kafka topic "shengang" -> JSON validation -> BSON Document -> MongoDB.
  *
  * Micro-batch interval is 30 seconds. Offsets are managed by Kafka
  * ("enable.auto.commit" = false, "auto.offset.reset" = latest), so a
  * restarted job resumes from the latest offsets rather than replaying.
  */
object ShenGangStreaming {

  def main(args: Array[String]): Unit = {

    // Local (method-scoped) import: only needed by the parsing stage below.
    import scala.util.Try

    // NOTE(review): endpoints and the application jar path are hard-coded;
    // consider externalizing them to spark-submit --conf / args if this job
    // must run against other environments.
    val conf = new SparkConf()
      .setAppName("ShenGangStreaming")
      .setJars(Array("file:///mnt/disk/jar/shengang/streaming-logger-0.0.1-jar-with-dependencies.jar"))
      .set("spark.mongodb.input.uri", "mongodb://175.102.18.112:27018/shengang.log_data")
      .set("spark.mongodb.output.uri", "mongodb://175.102.18.112:27018/shengang.log_data")
      .setMaster("spark://175.102.18.112:7077")

    // 30-second micro-batches.
    val ssc = new StreamingContext(conf, Seconds(30))

    val kafkaParams = Map[String, Object](
      "bootstrap.servers" -> "impala01:9092",
      "key.deserializer" -> classOf[StringDeserializer],
      "value.deserializer" -> classOf[StringDeserializer],
      "group.id" -> "shengang",
      "auto.offset.reset" -> "latest",
      // Offsets are not auto-committed; see the object-level note above.
      "enable.auto.commit" -> (false: java.lang.Boolean)
    )

    val topics = Array("shengang")
    val stream = KafkaUtils.createDirectStream[String, String](
      ssc,
      PreferConsistent,
      Subscribe[String, String](topics, kafkaParams)
    )

    stream.map(record => record.value)
      .flatMap { s =>
        // Validate the payload through JSONObject before converting it to a
        // BSON Document. Previously an unparseable record threw from the task
        // and could kill the whole streaming job; now bad records are dropped.
        Try(Document.parse(new JSONObject(s).toString)).toOption
      }
      .foreachRDD { rdd =>
        // Skip the Mongo round-trip for idle 30 s windows / all-bad batches.
        if (!rdd.isEmpty()) {
          MongoSpark.save(rdd)
        }
      }

    ssc.start()
    ssc.awaitTermination()
  }
}
