import com.typesafe.config.ConfigFactory
import org.apache.kafka.clients.consumer.KafkaConsumer
import org.apache.kafka.common.serialization.ByteArrayDeserializer

import scala.collection.JavaConverters._
/**
 * Minimal Kafka consumer demo: subscribes to the configured topics and, once a
 * second, prints the total byte size of the message bodies fetched in that poll.
 *
 * Configuration (Typesafe Config, e.g. application.conf):
 *   - kafka.brokers : bootstrap server list
 *   - kafka.groupid : consumer group id
 *   - kafka.topics  : comma-separated topic names
 */
object BasicKafkaConsumer {

  def main(args: Array[String]): Unit = {

    val conf = ConfigFactory.load()

    // Raw byte deserializers on both key and value: we only measure sizes,
    // we never decode payloads.
    val kafkaParams = Map[String, Object](
      "bootstrap.servers"    -> conf.getString("kafka.brokers"),
      "key.deserializer"     -> classOf[ByteArrayDeserializer],
      "value.deserializer"   -> classOf[ByteArrayDeserializer],
      "group.id"             -> conf.getString("kafka.groupid"),
      "receive.buffer.bytes" -> (65536: java.lang.Integer),
      "auto.offset.reset"    -> "latest"
    ).asJava

    // Topics from config; trim entries and drop empties so a stray space or
    // trailing comma in "kafka.topics" doesn't produce an invalid topic name.
    val topics = conf.getString("kafka.topics")
      .split(",")
      .map(_.trim)
      .filter(_.nonEmpty)
      .toList
      .asJava

    val consumer = new KafkaConsumer[Array[Byte], Array[Byte]](kafkaParams)
    try {
      consumer.subscribe(topics)

      // poll(0) returns immediately: it triggers partition assignment and
      // fetches whatever is already buffered, without blocking for records.
      consumer.poll(0)

      println("Starting positions are: ")
      consumer.assignment().asScala.foreach { tp =>
        // Print the starting offset for each assigned topic-partition.
        println(s"${tp.topic()} ${tp.partition()} ${consumer.position(tp)}")
      }

      while (true) {
        // Sum of the value (body) sizes across all records in this batch.
        println(consumer.poll(512).asScala.map(_.value().length.toLong).sum)
        Thread.sleep(1000)
      }
    } finally {
      // Release sockets/buffers even if the loop dies with an exception.
      consumer.close()
    }
  }

}
