package com.shujia.kafka

import java.util.Properties

import org.apache.kafka.clients.producer.{Callback, KafkaProducer, ProducerRecord, RecordMetadata}
import org.apache.kafka.common.serialization.StringSerializer

import scala.io.Source


object Demo1Producer {

  /**
    * Kafka producer demo: writes data into Kafka via the Java client.
    *
    * Reads student records (comma-separated lines) from a local file and
    * sends each line to the "student" topic, choosing the partition from
    * the student's age.
    */
  def main(args: Array[String]): Unit = {

    // Connection / serialization configuration.
    val properties: Properties = new Properties()
    properties.setProperty("bootstrap.servers", "node2:9092,node3:9092,node4:9092") // Kafka broker addresses
    // Use fully-qualified class names via setProperty for a uniform config
    // style (the original mixed setProperty with put(Class) values).
    properties.setProperty("key.serializer", classOf[StringSerializer].getName)
    properties.setProperty("value.serializer", classOf[StringSerializer].getName)

    // Create the producer.
    val producer = new KafkaProducer[String, String](properties)

    // FIX: the original never closed the BufferedSource (file-handle leak)
    // and leaked the producer when reading or sending threw. Both resources
    // are now released in the finally block below.
    val source = Source.fromFile("spark/data/students.txt")
    try {
      source
        .getLines()
        .foreach(line => {

          val topic = "student"

          // Age is the third comma-separated column; it drives partition
          // choice (3 partitions assumed for the "student" topic).
          val age = line.split(",")(2).toInt
          val partition = age % 3
          val value = line

          // Build one message per input line: explicit partition, no key.
          val producerRecord = new ProducerRecord[String, String](topic, partition, null, value)

          // Send asynchronously; the callback fires once the broker
          // acknowledges (or rejects) the record.
          producer.send(producerRecord, new Callback {
            override def onCompletion(metadata: RecordMetadata, exception: Exception): Unit = {
              // FIX: on a failed send, metadata is null and exception is
              // set — the original ignored exception and would NPE here,
              // hiding the real error. Report failures explicitly.
              if (exception != null) {
                exception.printStackTrace()
              } else {
                println(metadata.offset() + "\t" + metadata.partition())
              }
            }
          })

        })
    } finally {
      source.close()
      // close() flushes buffered/in-flight records before releasing
      // the producer's network resources.
      producer.close()
    }

  }
}
