//package org.xukai.spark.streaming.scala
//
//
//import com.typesafe.config.ConfigFactory
//import kafka.serializer.StringDecoder
//import org.apache.log4j.Logger
//import org.apache.spark.SparkConf
//import org.apache.spark.rdd.RDD
//import org.apache.spark.streaming.kafka.KafkaUtils
//import org.apache.spark.streaming.{Seconds, StreamingContext, Time}
//
///**
//  * @desc Spark Streaming example: consumes Kafka topics via the direct (receiver-less)
//  *       API, recovering offsets from a checkpoint directory on restart.
//  * @author xukai
//  * @date 2017-12-30 8:01 PM
//  */
//// NOTE(review): if this code is ever revived, `main` inside a `class` is not a runnable
//// JVM entry point — this should be declared as an `object`.
//class DirectKafkaDefaultExample {
//  private val conf = ConfigFactory.load()
//  private val logger = Logger.getLogger(this.getClass)
//  def main(args: Array[String]) {
//    if (args.length < 2) {
//      System.exit(1)
//    }
//    val Array(brokers, topics) = args
//    val checkpointDir = "/tmp/checkpointLogs"
//    val kafkaParams = Map[String, String]("metadata.broker.list" -> brokers)
//    // Extract : Create direct kafka stream with brokers and topics
//    val topicsSet = topics.split(",").toSet
//    val ssc = StreamingContext.getOrCreate(checkpointDir, setupSsc(topicsSet, kafkaParams, checkpointDir))
//    ssc.start()// Start the spark streaming
//    ssc.awaitTermination();
//  }
//  def setupSsc(topicsSet:Set[String],kafkaParams:Map[String,String],checkpointDir:String)():StreamingContext=
//  { //setting sparkConf with configurations
//    val sparkConf = new SparkConf()
//    sparkConf.setAppName(conf.getString("DirectKafkaDefaultExample"))
//    val ssc = new StreamingContext(sparkConf, Seconds(5))
//    val messages = KafkaUtils.createDirectStream[String, String, StringDecoder, StringDecoder](
//      ssc, kafkaParams, topicsSet)
//    val line = messages.map(_._2)
//    val lines = line.flatMap(line => line.split("\n"))
////    val filteredLines = lines.filter { x => LogFilter.filter(x, "1") }
//    lines.foreachRDD((rdd: RDD[String], time: Time) => {
//      rdd.foreachPartition { partitionOfRecords => {
//        if (partitionOfRecords.isEmpty) {
//          logger.info("partitionOfRecords FOUND EMPTY ,IGNORING THIS PARTITION")
//        } else {
//          /* write computation logic here  */
//        }
//      } //partition ends
//      }//foreachPartition ends
//    })
//    ssc.checkpoint(checkpointDir) // the offset ranges for the stream will be stored in the checkpoint
//    ssc }
//
//}
