package com.tr.rts.cdf

import java.io.FileOutputStream
import java.util.{Properties, UUID}

import kafka.consumer.{Consumer, ConsumerConfig}
import org.scalatest.{BeforeAndAfter, FunSuite, Matchers}

/**
 * Ad-hoc integration harness for pulling CDF messages off Kafka via the
 * ZooKeeper-based high-level consumer. The actual consumption test is
 * commented out below; only the consumer configuration is built here.
 */
class KafkaConsumer extends FunSuite with Matchers with BeforeAndAfter {
  // Topic carrying the CDF quote feed under test.
  val topic = "ecpdevcdf.Quote.Skeletal.1.Default"
  //val topic = "ecpdevcdf.BIMInstrument.All.1.Default"
  // ZooKeeper endpoint of the target cluster.
  // Known hosts: c476:10.205.0.73:2181 c323:10.205.0.74:2181 c385:10.205.0.75:2181 kafka-cis:10.205.248.62
  val consumerBroker = "10.205.0.73:2181"

  // Consumer configuration. A random group.id gives every run a fresh
  // consumer group, and "smallest" makes it replay the topic from the start.
  val props = new Properties()
  Seq(
    "zookeeper.connect"            -> consumerBroker,
    "group.id"                     -> UUID.randomUUID().toString,
    "zookeeper.session.timeout.ms" -> "4000",
    "zookeeper.sync.time.ms"       -> "2000",
    "auto.commit.interval.ms"      -> "10000",
    "auto.offset.reset"            -> "smallest",
    "serializer.class"             -> "kafka.serializer.StringEncoder"
  ).foreach { case (key, value) => props.put(key, value) }

  /*
  test("consume cdf message from kafka") {
    val consumerConfig = new ConsumerConfig(props)
    val consumer = Consumer.create(consumerConfig)
    val consumerMap = consumer.createMessageStreams(Map(topic -> 1))
    val streams = consumerMap.get(topic).get

    val out = new FileOutputStream("D:/message.avro")

    for (stream <- streams) {
      val it = stream.iterator()
      while (it.hasNext()){
        val data = it.next()
        out.write(data.message)
        println(s"print message finished, ${data.message.length}")
      }
    }
    out.close()
  }
  */
}