package spark_base.steam

import java.util.HashMap

import java.util.Properties
import java.util.HashMap

import org.apache.kafka.clients.producer.{KafkaProducer, ProducerConfig, ProducerRecord}


/**
 * Sends a single string message to a Kafka topic when run.
 *
 * @param brokers comma-separated bootstrap server list, e.g. "127.0.0.1:9092"
 * @param topic   destination Kafka topic
 * @param str     message payload (sent as the record value; the key is null)
 */
class KafkaProduce(brokers : String, topic : String, str : String) extends Runnable {

  /** Builds a producer, sends `str` to `topic`, and closes the producer (flushing the send). */
  def run(): Unit = {
    val prop = new HashMap[String, Object]()
    prop.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, this.brokers)
    prop.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG,
      "org.apache.kafka.common.serialization.StringSerializer")
    prop.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG,
      "org.apache.kafka.common.serialization.StringSerializer")
    // NOTE(review): the old keys "request.required.acks" and "producer.type" are legacy
    // (pre-0.9 Scala producer) configs that the modern KafkaProducer ignores with a
    // warning. ACKS_CONFIG is the supported equivalent of request.required.acks=1.
    prop.put(ProducerConfig.ACKS_CONFIG, "1")

    val producer = new KafkaProducer[String, String](prop)
    try {
      // Null key: the partitioner spreads keyless records across partitions.
      val message = new ProducerRecord[String, String](this.topic, null, this.str)
      // send() is asynchronous; on its own it only enqueues the record.
      producer.send(message)
    } finally {
      // Previously the producer was never closed: that leaked its threads/sockets and,
      // because send() is async, the single record could be dropped when the thread
      // exited. close() flushes all pending sends before releasing resources.
      producer.close()
    }
  }
}
