package com.wzz


import org.apache.spark.SparkConf
import org.apache.spark.streaming.{Durations, Seconds, StreamingContext}
import org.apache.spark.streaming.dstream.{DStream, ReceiverInputDStream}
import org.apache.spark.streaming.kafka010.{ConsumerStrategies, KafkaUtils, LocationStrategies}


/**
 * Spark Streaming job that consumes messages from a Kafka topic and
 * prints each micro-batch's record values to stdout (local debug use).
 *
 * @since 2021-08-26
 */
object FromKafka {

  /**
   * Entry point: subscribes to the Kafka topic "aa" through the project
   * helper [[Myutils.getKafkaStream]], extracts each record's value and
   * prints every 5-second micro-batch to stdout. Blocks until the
   * streaming context is terminated.
   *
   * @param args command-line arguments (unused)
   */
  def main(args: Array[String]): Unit = {
    // local[*]: use all available cores — suitable for local testing only.
    val conf = new SparkConf().setMaster("local[*]").setAppName("kafka")

    // 5-second micro-batch interval.
    val ssc = new StreamingContext(conf, Durations.seconds(5))

    // Canonical spelling is "ERROR" (valid levels: ALL, DEBUG, ERROR,
    // FATAL, INFO, OFF, TRACE, WARN); "Error" relied on Spark's internal
    // upper-casing and is not guaranteed by the documented contract.
    ssc.sparkContext.setLogLevel("ERROR")

    // Subscribe to topic "aa"; yields a stream of Kafka consumer records.
    val kafkaStream = Myutils.getKafkaStream("aa", ssc)

    // Keep only the message payload (the Kafka record value).
    val messageValues = kafkaStream.map(_.value())

    // Debug sink: print the first elements of each batch.
    messageValues.print()

    ssc.start()
    // Block the driver thread until the context is stopped or fails.
    ssc.awaitTermination()
  }

}
