package kafkaStreaming


import org.apache.kafka.clients.producer.{KafkaProducer, ProducerConfig, ProducerRecord}
import org.apache.kafka.common.serialization.StringDeserializer
import org.apache.spark.SparkConf
import org.apache.spark.streaming.kafka010.LocationStrategies.PreferConsistent
import org.apache.spark.streaming.kafka010.{ConsumerStrategies, KafkaUtils}
import org.apache.spark.streaming.{Seconds, StreamingContext}

import java.util


/**
 * Spark Streaming job: consumes tab-separated student-info lines from the
 * Kafka topic `stuInfo`, counts records per sex per 5-second micro-batch,
 * and writes each count as "sex,count" to the Kafka topic `zyx_test2`.
 */
object kafkaStreaming {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setMaster("local[*]").setAppName("KafkaStreaming")
    val ssc = new StreamingContext(conf, Seconds(5))
    val group = "niit"
    val topic = "stuInfo"
    // Broker address and output topic declared once, so the consumer and
    // producer sides cannot drift apart. Host alias: 123.56.187.176 -> cheng.
    val bootstrapServers = "cheng:9092"
    val outputTopic = "zyx_test2"
    ssc.sparkContext.setLogLevel("error")

    val kafkaParam = Map[String, Object](
      "bootstrap.servers" -> bootstrapServers,
      "key.deserializer" -> classOf[StringDeserializer],
      "value.deserializer" -> classOf[StringDeserializer],
      "group.id" -> group,
      // Offset reset policy: start from the earliest available offset when
      // this consumer group has no committed offset yet.
      "auto.offset.reset" -> "earliest",
      // Offsets are not auto-committed by the consumer.
      "enable.auto.commit" -> (false: java.lang.Boolean)
    )

    ssc.checkpoint("./checkpoint")
    val lineStream = KafkaUtils.createDirectStream(
      ssc,
      PreferConsistent,
      ConsumerStrategies.Subscribe[String, String](Array(topic), kafkaParam)
    )
    // Keep only the record value (the tab-separated student-info line).
    val line = lineStream.map(_.value())
    line.print()

    line.foreachRDD { rdd =>
      // Count records per sex. Column 2 holds the sex flag ("1" or "0");
      // any other value maps to the empty-string key, as in the original.
      val result = rdd
        .map { record =>
          val sex = record.split("\t")(2)
          val key = if (sex == "1" || sex == "0") sex else ""
          (key, 1)
        }
        .reduceByKey(_ + _)

      // BUG FIX: the original built (and closed) one KafkaProducer for every
      // single output record. KafkaProducer is heavyweight; create one per
      // partition instead, and close it in `finally` so pending sends are
      // flushed even if an iteration fails. foreachPartition also keeps the
      // producer on the executor where the records live (never serialized).
      result.foreachPartition { partition =>
        if (partition.nonEmpty) {
          val kafkaProp = new util.HashMap[String, Object]()
          kafkaProp.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers)
          kafkaProp.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG,
            "org.apache.kafka.common.serialization.StringSerializer")
          kafkaProp.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG,
            "org.apache.kafka.common.serialization.StringSerializer")
          val producer = new KafkaProducer[String, String](kafkaProp)
          try {
            partition.foreach { case (sex, num) =>
              // Emit one "sex,count" message per aggregated key.
              producer.send(new ProducerRecord[String, String](outputTopic, s"$sex,$num"))
            }
          } finally {
            producer.close() // flushes buffered sends before closing
          }
        }
      }
    }

    ssc.start()
    ssc.awaitTermination()
  }
}
