package com.shujia.spark.kafka

import java.util.Properties

import org.apache.kafka.clients.producer.{Callback, KafkaProducer, ProducerRecord, RecordMetadata}
import org.apache.kafka.common.serialization.StringSerializer

import scala.io.Source

object Demo1Producer {

  /**
    * Demo: read student records from a local file and produce them to Kafka,
    * hash-partitioned by class so rows of the same class land in the same partition.
    */
  def main(args: Array[String]): Unit = {

    // 1. Producer configuration
    val properties: Properties = new Properties()
    properties.setProperty("bootstrap.servers", "node1:9092") // Kafka broker address
    properties.put("key.serializer", classOf[StringSerializer])
    properties.put("value.serializer", classOf[StringSerializer])

    // Create the producer
    val producer: KafkaProducer[String, String] = new KafkaProducer[String, String](properties)

    // Keep a handle to the file so it can be closed in `finally`
    // (the original never closed it — resource leak).
    val source = Source.fromFile("spark/data/students.txt")

    try {
      source
        .getLines()
        .foreach { line =>
          // Hash partitioning: the same class always maps to the same partition.
          // NOTE(review): assumes every line has at least 5 comma-separated
          // fields, with the class name in column 4 — confirm input format.
          val clazz = line.split(",")(4)

          // `abs` guards against a negative hashCode; 3 = assumed partition count.
          val partition = math.abs(clazz.hashCode % 3)

          // Build a record: topic, explicit partition, no key, the line as value.
          val msg = new ProducerRecord[String, String]("topic", partition, null, line)

          // Asynchronous send with a completion callback.
          producer.send(msg, new Callback {
            override def onCompletion(recordMetadata: RecordMetadata, e: Exception): Unit = {
              // BUG FIX: the original printed "success" unconditionally, hiding
              // delivery failures. A non-null `e` means the send failed.
              if (e != null) {
                e.printStackTrace()
              } else {
                println("数据发送成功")
              }
            }
          })

          // Throttle to roughly one record per second (demo pacing only).
          Thread.sleep(1000)
        }
    } finally {
      // BUG FIX: close the file and the producer even if reading or sending
      // throws; closing the producer also flushes buffered records.
      source.close()
      producer.close()
    }
  }
}
