package com.andnnl


import java.util.Properties

import kafka.producer.{KeyedMessage, Producer, ProducerConfig}
import kafka.serializer.StringDecoder
import org.apache.kafka.clients.producer.{KafkaProducer, ProducerRecord}
import org.apache.spark.streaming.dstream.DStream
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.{SparkConf, SparkContext}

/**
  * Created by chenss on 2017/12/22.
  */
/**
  * Demo producer: publishes a random line from a fixed pool of sentences to the
  * "words" Kafka topic every 200 ms, forever (intended as input for a streaming
  * word-count job). Messages are sent key-less as raw UTF-8 bytes.
  *
  * NOTE: the object name stays lowercase `producer` for compatibility with
  * existing run configurations, although UpperCamelCase is conventional.
  */
object producer {
    def main(args: Array[String]): Unit = {
        val topic   = "words"
        val brokers = "master:9092,slave1:9092,slave2:9092"

        // New-client (org.apache.kafka.clients) configuration only.
        // The legacy Scala-producer keys ("metadata.broker.list",
        // "serializer.class") were removed: KafkaProducer ignores them and
        // logs "unknown configuration" warnings on startup.
        val props = new Properties()
        props.put("bootstrap.servers", brokers)
        props.put("key.serializer", "org.apache.kafka.common.serialization.ByteArraySerializer")
        props.put("value.serializer", "org.apache.kafka.common.serialization.ByteArraySerializer")

        val producer: KafkaProducer[Array[Byte], Array[Byte]] =
            new KafkaProducer[Array[Byte], Array[Byte]](props)

        // Fixed pool of sample sentences to emit.
        val content: Array[String] = Array(
            "kafka kafka produce",
            "kafka produce message",
            "hello world hello",
            "wordcount topK topK",
            "hbase spark kafka"
        )

        val rng = new scala.util.Random()
        try {
            // Intentionally infinite: this is a firehose for the streaming demo.
            while (true) {
                val line = content(rng.nextInt(content.length))
                val record = new ProducerRecord[Array[Byte], Array[Byte]](topic, line.getBytes("UTF-8"))
                println(line)
                producer.send(record)
                Thread.sleep(200)
            }
        } finally {
            // Reached only on failure (e.g. send/interrupt throws): flush
            // buffered records and release network resources cleanly.
            producer.close()
        }
    }
}