package com.shujia.kafka

import java.util.Properties

import com.alibaba.fastjson.{JSON, JSONObject}
import org.apache.kafka.clients.producer.{KafkaProducer, ProducerRecord}

import scala.io.Source

object Demo2Student {
  /**
    * Reads student records (one JSON object per line) from data/students.json
    * and produces them to the Kafka topic "student2", routing all rows of the
    * same class ("clazz" field) to the same partition.
    */
  def main(args: Array[String]): Unit = {
    /**
      * 1. Create a producer and establish a connection to the Kafka cluster.
      */
    val properties = new Properties

    // Kafka broker list
    properties.setProperty("bootstrap.servers", "master:9092,node1:9092,node2:9092")

    // key/value serializer classes
    properties.setProperty("key.serializer", "org.apache.kafka.common.serialization.StringSerializer")
    properties.setProperty("value.serializer", "org.apache.kafka.common.serialization.StringSerializer")

    val kafkaProducer = new KafkaProducer[String, String](properties)

    // Read the student data, closing the Source so the file handle is not leaked.
    val source = Source.fromFile("data/students.json")
    val students: List[String] =
      try source.getLines().toList
      finally source.close()

    try {
      // Write each record to Kafka.
      students.foreach(student => {
        /**
          * If the topic does not exist, Kafka auto-creates it with 1 partition
          * and 1 replica. To create it explicitly:
          * kafka-topics.sh --create --zookeeper master:2181,node1:2181,node2:2181 --replication-factor 3 --partitions 3 --topic student2
          *
          * The producer's default partitioning strategy is round-robin.
          */
        // Route all rows of the same class to the same partition.
        val jsonObj: JSONObject = JSON.parseObject(student)
        val clazz: String = jsonObj.getString("clazz")

        // Hash partitioning. floorMod keeps the result in [0, 3) even when
        // hashCode is Int.MinValue (math.abs(Int.MinValue) is still negative,
        // which would be an invalid partition number).
        val partition: Int = java.lang.Math.floorMod(clazz.hashCode, 3)

        val record = new ProducerRecord[String, String]("student2", partition, clazz, student)

        // send() is asynchronous; records are batched internally.
        kafkaProducer.send(record)
      })

      // Flush once after the loop instead of per message, so the producer can
      // batch sends rather than forcing a network round-trip for each record.
      kafkaProducer.flush()
    } finally {
      // Ensure the producer is closed even if parsing or sending fails.
      kafkaProducer.close()
    }
  }

}
