package com.cyy.log.spark_extract.real_time

import com.cyy.log.spark_extract.common.LogInfo
import org.apache.spark.sql.SparkSession
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.streaming.kafka.KafkaUtils
//import org.apache.spark.{SparkConf, SparkContext}

/**
  * @Author: Cyy
  * @Description:
  * @Date:Created in 16:46 2019/5/8
  */
object SparkRealTimeFormat {

  /**
    * Entry point: consumes raw log lines from Kafka via the receiver-based
    * connector, parses each record with [[LogInfo.parseLog_reflection]] and
    * prints up to 100 parsed records per 10-second batch.
    *
    * Expected arguments:
    *   - zkQuorum:   ZooKeeper connection string used by the Kafka receiver
    *   - group:      Kafka consumer group id
    *   - topics:     comma-separated list of topic names
    *   - numThreads: receiver thread count per topic (must be an integer)
    *
    * Exits with status 1 on invalid arguments; otherwise blocks in
    * `awaitTermination` until the streaming context is stopped.
    */
  def main(args: Array[String]): Unit = {

    if (args.length != 4) {
      System.err.println("Usage:<SparkRealTimeFormat> <zkQuorum> <group> <topics> <numThreads>")
      System.exit(1)
    }

    val Array(zkQuorum, group, topics, numThreads) = args

    // Fail fast with a clear message instead of an uncaught
    // NumberFormatException deep inside topic-map construction.
    val threadsPerTopic =
      try numThreads.toInt
      catch {
        case _: NumberFormatException =>
          System.err.println(s"<numThreads> must be an integer, got '$numThreads'")
          System.exit(1)
          return
      }

    // NOTE(review): the hardcoded local master overrides `spark-submit --master`;
    // remove `.master(...)` for cluster deployment.
    val spark = SparkSession.builder()
      .master("local[2]")
      .appName("SparkRealTimeFormat")
      .getOrCreate()
    val sc = spark.sparkContext
    sc.setLogLevel("WARN")

    // 10-second micro-batches.
    val ssc = new StreamingContext(sc, Seconds(10))

    // Receiver-based connector expects topic -> receiver-thread-count.
    val topicMap = topics.split(",").map((_, threadsPerTopic)).toMap

    val access = KafkaUtils.createStream(ssc, zkQuorum, group, topicMap)

    // Records arrive as (key, message); only the message payload is parsed.
    access.map(_._2).map(LogInfo.parseLog_reflection).print(100)

    ssc.start()
    ssc.awaitTermination()
  }

}
