package com.shujia.kafka

import java.util.Properties

import org.apache.kafka.clients.producer.{KafkaProducer, ProducerRecord}

import scala.io.Source

/**
  * Loads the telecom ("dianxin") data set from a local file and publishes
  * each line to the Kafka topic `dianxin2`, routing records to a partition
  * chosen by hashing the city-id column so that all records for one city
  * land in the same partition.
  */
object Demo2DianXnToKafka {

  /** Target Kafka topic (see the kafka-topics.sh command in `main`). */
  private val Topic = "dianxin2"

  /** Number of partitions the topic was created with — must stay in sync
    * with the `--partitions` value used at topic-creation time. */
  private val NumPartitions = 3

  /** Maps a record key to a partition index in [0, numPartitions).
    *
    * Note the order of operations: `abs` is applied AFTER the modulo.
    * `abs(hashCode) % n` would be negative when `hashCode == Int.MinValue`
    * (since `abs(Int.MinValue)` overflows), whereas `abs(hashCode % n)` is
    * always a valid partition index.
    */
  private def partitionFor(key: String, numPartitions: Int): Int =
    math.abs(key.hashCode % numPartitions)

  def main(args: Array[String]): Unit = {

    // Read the whole data set into memory, closing the file handle when
    // done (the Source was previously leaked).
    val source = Source.fromFile("Spark/data/dianxin_data")
    val data: List[String] =
      try source.getLines().toList
      finally source.close()

    val properties = new Properties()

    // 1. Kafka broker address
    properties.setProperty("bootstrap.servers", "master:9092")

    // 2. Key/value serializer classes
    properties.setProperty("key.serializer", "org.apache.kafka.common.serialization.StringSerializer")
    properties.setProperty("value.serializer", "org.apache.kafka.common.serialization.StringSerializer")

    // Producer instance — closed in the finally block below so buffered
    // records are flushed even if a send fails.
    val producer = new KafkaProducer[String, String](properties)

    /**
      * Topic was created with:
      * kafka-topics.sh --create --zookeeper master:2181 --replication-factor 1 --partitions 3 --topic dianxin2
      */

    try {
      data.foreach { line =>
        val fields = line.split(",")
        // Skip blank/malformed lines that lack the city-id column instead
        // of aborting the whole load with an ArrayIndexOutOfBoundsException.
        if (fields.length > 2) {
          val cityId: String = fields(2)

          // Hash-partition on the city id.
          val partition: Int = partitionFor(cityId, NumPartitions)

          val record = new ProducerRecord[String, String](Topic, partition, cityId, line)
          producer.send(record)
        }
      }
    } finally {
      // Flush outstanding records and release network resources.
      producer.close()
    }
  }

}
