package LogServer.spark.streaming

import LogServer.constants.{PropertiesAssemblyJar, PropertiesMongo, PropertiesStreaming}
import LogServer.dao.{Demo, MongoConnector, VmhostDao}
import LogServer.kafka.KafkaConnector
import LogServer.spark.sql.SqlContext
import com.mongodb.casbah.commons.{Imports, MongoDBObject}
import kafka.serializer.StringDecoder
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.SQLContext
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.streaming.dstream.DStream
import org.apache.spark.streaming.kafka.KafkaUtils
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.codehaus.jettison.json.JSONObject

/**
  * Created by root on 17-2-14.
  */
class SparkStreaming {
  /**
    * Starts the Spark Streaming job: builds the streaming context, wires the
    * Kafka input stream into [[handler]], then blocks the calling thread until
    * the streaming context terminates.
    */
  def startJob(): Unit = {
    // NOTE(review): app name, master URL and executor memory are hard-coded
    // here while other settings come from Properties* helpers — consider
    // externalising these too.
    val conf = new SparkConf()
      .setAppName("LogServer")
      .setMaster("spark://master:7077")
      .set("spark.executor.memory", "500m")
      .setJars(PropertiesAssemblyJar.getAssemblyJars())
    val timespan = PropertiesStreaming.getTimespan()
    val ssc = new StreamingContext(conf, Seconds(timespan))

    val kafkaConnector = new KafkaConnector()
    val dstream = kafkaConnector.createStream(ssc)

    // Resolve the Mongo properties once on the driver so the object is shipped
    // to executors with the task closures instead of being re-read per task.
    val propertiesMongoObject = PropertiesMongo.getAllProperties()
    handler(dstream, propertiesMongoObject, ssc.sparkContext)

    // Start the job; awaitTermination blocks until the context is stopped.
    ssc.start()
    ssc.awaitTermination()
  }

  /**
    * Handles the input stream by delegating persistence of each batch to
    * [[VmhostDao]], passing along the Mongo connection properties.
    *
    * @param dstream               stream of raw log lines read from Kafka
    * @param propertiesMongoObject MongoDB connection properties resolved on the driver
    * @param sc                    SparkContext handed to the DAO (presumably for
    *                              broadcasting the properties — confirm in VmhostDao)
    */
  def handler(dstream: DStream[String], propertiesMongoObject: PropertiesMongo, sc: SparkContext): Unit = {
    val vmhostDao = new VmhostDao()
    vmhostDao.save(dstream, propertiesMongoObject, sc)
  }
}
