package com.zhaosc.spark.stream

import kafka.network.Processor
import kafka.producer.KeyedMessage
import java.util.Properties
import kafka.producer.Producer
import java.util.Date
import kafka.producer.ProducerConfig

object SparkStreamingDataManuallyProducerForKafka {

  /** Publishes a single timestamped message to the Kafka topic `"topic1"` on
    * a broker at `localhost:9092`, using the legacy (pre-0.9) Scala producer
    * API, then closes the producer.
    *
    * @param args command-line arguments (unused)
    */
  def main(args: Array[String]): Unit = {
    val props = new Properties()
    // Legacy producer config: broker list, String value encoder, and
    // acks=1 (wait for the partition leader to acknowledge each send).
    props.put("metadata.broker.list", "localhost:9092")
    props.put("serializer.class", "kafka.serializer.StringEncoder")
    props.put("request.required.acks", "1")

    val producer = new Producer[String, String](new ProducerConfig(props))
    try {
      val msg = s"message publishing time - ${new Date().toString}"
      producer.send(new KeyedMessage[String, String]("topic1", msg))
    } finally {
      // Always release the producer's network resources, even if send() throws
      // (e.g. broker unreachable) — otherwise sockets/threads leak.
      producer.close()
    }
  }
}