package com.shujia.kafka

import java.util.Properties

import com.alibaba.fastjson.JSON
import org.apache.kafka.clients.producer.{KafkaProducer, ProducerRecord}

import scala.io.Source

object Demo2StudentTokafka {

  /**
    * Reads student JSON records from a local file and writes each line to the
    * Kafka topic "student3", routing all students of the same class ("clazz")
    * to the same partition via a hash of the class name.
    */
  def main(args: Array[String]): Unit = {

    /**
      * 1. Create the producer.
      */
    val properties = new Properties()

    // 1. Kafka broker list
    properties.setProperty("bootstrap.servers", "master:9092,node1:9092,node2:9092")

    // 2. Serializer classes for record keys and values
    properties.setProperty("key.serializer", "org.apache.kafka.common.serialization.StringSerializer")
    properties.setProperty("value.serializer", "org.apache.kafka.common.serialization.StringSerializer")

    val kafkaProducer = new KafkaProducer[String, String](properties)

    /**
      * 2. Read the local file. The Source is closed explicitly once all lines
      * have been materialized into a List (fix: the original leaked the file
      * handle by never closing the Source).
      */
    val source = Source.fromFile("data/students.json")
    val studentList: List[String] =
      try source.getLines().toList
      finally source.close()

    //kafka-topics.sh --create --zookeeper master:2181,node1:2181,node2:2181 --replication-factor 2 --partitions 3 --topic student3

    try {
      // 3. Loop over the records and write each one to Kafka.
      for (student <- studentList) {

        /**
          * Write records belonging to the same class to the same partition.
          */
        val clazz: String = JSON.parseObject(student).getString("clazz")

        // Hash partitioning (the Kafka producer default is round-robin when
        // no key or partition is given). `hashCode % 3` is in [-2, 2], so it
        // can never be Int.MinValue and Math.abs is safe here.
        val partition: Int = Math.abs(clazz.hashCode % 3)

        // One record: explicit partition, no key, the raw JSON line as value.
        val record = new ProducerRecord[String, String]("student3", partition, null, student)

        // Send asynchronously; delivery is batched internally by the producer.
        kafkaProducer.send(record)
      }

      // Flush once after the loop (fix: the original flushed after every
      // single record, which defeats producer batching and forces a blocking
      // round-trip per message).
      kafkaProducer.flush()
    } finally {
      // Always release producer resources, even if parsing or sending fails.
      kafkaProducer.close()
    }

  }

}
