package spark_base.steam

import com.alibaba.fastjson.JSON
import org.apache.spark.streaming.Seconds
import org.apache.spark.streaming.StreamingContext
import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.kafka.common.serialization.StringDeserializer
import org.apache.spark.SparkConf
import org.apache.spark.streaming.kafka010._
import org.apache.spark.streaming.kafka010.KafkaUtils
import org.apache.spark.streaming.kafka010.LocationStrategies.PreferConsistent
import org.apache.spark.streaming.kafka010.ConsumerStrategies.Subscribe

object KafkaConsumer {

  /** Demo Spark Streaming job: consumes records from a Kafka topic, prints each
    * (key, value) pair, and for every record received fires a [[KafkaProduce]]
    * runnable on its own thread that writes a random value back out.
    *
    * Optional CLI args (defaults preserve the original hard-coded behavior):
    *   args(0) — consumer bootstrap servers (default "192.168.88.131:9092")
    *   args(1) — producer broker list      (default "127.0.0.1:9092")
    *   args(2) — topic name                (default "test")
    *
    * NOTE(review): the consumer and producer default to DIFFERENT broker
    * addresses — confirm whether both should point at the same cluster.
    * If they do, producing back into the subscribed topic creates a feedback
    * loop (every consumed record spawns a new one).
    *
    * NOTE(review): offsets are never committed (enable.auto.commit=false and
    * no commitAsync call), so a restart resumes from "latest".
    */
  def main(args: Array[String]): Unit = {
    // Configuration, overridable from the command line; defaults match the
    // previously hard-coded values so existing invocations are unaffected.
    val bootstrapServers = args.lift(0).getOrElse("192.168.88.131:9092")
    val producerBrokers  = args.lift(1).getOrElse("127.0.0.1:9092")
    val topic            = args.lift(2).getOrElse("test")

    // "local[2]" instead of "local": a streaming app needs more than one core
    // so task processing is not starved (Spark Streaming programming-guide
    // recommendation).
    val conf = new SparkConf().setMaster("local[2]").setAppName("consumer")
    val streamingContext = new StreamingContext(conf, Seconds(10))

    val kafkaParams = Map[String, Object](
      "bootstrap.servers" -> bootstrapServers,
      "key.deserializer" -> classOf[StringDeserializer],
      "value.deserializer" -> classOf[StringDeserializer],
      "group.id" -> "test-consumer-group",
      "auto.offset.reset" -> "latest",
      // Offsets are managed (or here: deliberately not committed) by the app.
      "enable.auto.commit" -> (false: java.lang.Boolean)
    )

    val stream = KafkaUtils.createDirectStream[String, String](
      streamingContext,
      PreferConsistent,
      Subscribe[String, String](Array(topic), kafkaParams)
    )

    // Output action #1: print each (key, value) pair per micro-batch.
    stream.map(record => (record.key, record.value)).print()

//    val events = stream.flatMap(line => Some(JSON.parseObject(line.value())))
//    val orders = events.map(
//      x => (x.getString("os_type"), x.getLong("click_count")))
//      .groupByKey()
//      .map(x => (x._1, x._2.size, x._2.reduceLeft(_ + _)))

    // Output action #2: for every record, produce a random value back to Kafka.
    // NOTE(review): this spawns one Thread + one producer per record — fine for
    // a demo, but a real job should reuse a single producer per partition.
    stream.foreachRDD(rdd =>
      rdd.foreachPartition(partition =>
        partition.foreach(record => {
          println("kafka=" + record)
          new Thread(new KafkaProduce(producerBrokers, topic, (math.random * 5).toString)).start()
        })
      ))

    streamingContext.start()
    streamingContext.awaitTermination()
  }

}
