package scala.hdfs.producer

import java.time.Duration
import java.util.Properties

import org.apache.kafka.clients.consumer.KafkaConsumer

object FlinkSQLConsumer {

  /** Kafka topic this consumer subscribes to. */
  val topic = "test_topic"

  def main(args: Array[String]): Unit = {
    kafkaSourceProducer()
  }

  /**
   * Polls `topic` forever and prints each received record's key and value.
   *
   * NOTE(review): despite its name, this method is a Kafka CONSUMER, not a
   * producer; the name is kept unchanged for compatibility with any callers.
   */
  def kafkaSourceProducer(): Unit = {
    // Test cluster alternative:
    // "bigdata-test-4:9092,bigdata-test-5:9092,bigdata-test-6:9092"
    val servers = "cdh-hadoop-2:9092"
    // Both key and value are plain strings.
    val stringDeserializer = "org.apache.kafka.common.serialization.StringDeserializer"

    val properties = new Properties()
    properties.put("bootstrap.servers", servers)
    properties.put("key.deserializer", stringDeserializer)
    properties.put("value.deserializer", stringDeserializer)
    // Start from the earliest offset when the group has no committed position.
    properties.put("auto.offset.reset", "earliest")
    properties.put("group.id", "group11")
    // Commit offsets automatically in the background.
    properties.put("enable.auto.commit", "true")

    val consumer = new KafkaConsumer[String, String](properties)
    import java.util.Collections
    consumer.subscribe(Collections.singletonList(topic))

    try {
      while (true) {
        println("----------------------------------")
        val records = consumer.poll(Duration.ofSeconds(1))
        println("===============value=>>>>>>>>>>>>>>>>>" + records.count())

        // BUG FIX: the original called records.iterator() three separate
        // times; each call returns a NEW iterator, so only the first record
        // of every batch was printed and the rest were silently dropped.
        // Iterate once and print every record.
        val it = records.iterator()
        while (it.hasNext) {
          val record = it.next()
          println("===============key=>>>>>>>>>>>>>>>>>" + record.key())
          println("===============value=>>>>>>>>>>>>>>>>>" + record.value())
        }
        println("=====================================")
      }
    } finally {
      // BUG FIX: the original consumer.close() sat after `while (true)` and
      // was unreachable; closing in `finally` releases the network connection
      // and triggers a clean group rebalance on any exit path.
      consumer.close()
    }
  }

}
