package com.shujia.kafka

import java.util.Properties

import org.apache.kafka.clients.producer.{KafkaProducer, ProducerRecord}

import scala.io.Source

object Demo2StudentToKafka {
  /**
    * Reads student records from a local file and publishes each one to the
    * Kafka topic "student1", routing records so that all students of the
    * same class land in the same partition.
    *
    * The topic must be created beforehand with a matching partition count:
    * kafka-topics.sh --create --zookeeper master:2181,node1:2181,node2:2181
    *   --replication-factor 3 --partitions 3 --topic student1
    */
  def main(args: Array[String]): Unit = {
    // Number of partitions "student1" was created with; must match the topic.
    val numPartitions = 3

    val properties = new Properties()

    // 1. Kafka broker addresses
    properties.setProperty("bootstrap.servers", "master:9092,node1:9092,node2:9092")

    // 2. Key and value serializer classes
    properties.setProperty("key.serializer", "org.apache.kafka.common.serialization.StringSerializer")
    properties.setProperty("value.serializer", "org.apache.kafka.common.serialization.StringSerializer")

    // Create the producer
    val producer: KafkaProducer[String, String] = new KafkaProducer[String, String](properties)

    // Keep a handle on the Source so it can be closed (the original leaked it).
    val source = Source.fromFile("data/students.txt")
    try {
      val students: List[String] = source.getLines().toList

      for (student <- students) {
        // Column 4 of the CSV line is the class name; same class -> same partition.
        val clazz: String = student.split(",")(4)

        // hashCode % n lies in (-n, n); Math.abs maps it into [0, n).
        val partition: Int = Math.abs(clazz.hashCode % numPartitions)

        // Explicit-partition record with a null key.
        val record = new ProducerRecord[String, String]("student1", partition, null, student)
        producer.send(record)
      }

      // Flush once after all sends; flushing per record (as the original did)
      // defeats the producer's internal batching.
      producer.flush()
    } finally {
      // Release the file handle and the producer even if a send fails.
      source.close()
      producer.close()
    }
  }
}
