package com.memer

import java.util.HashMap

import org.apache.kafka.clients.producer.{KafkaProducer, ProducerConfig, ProducerRecord}
import org.apache.kafka.common.serialization.StringDeserializer
import org.apache.spark.SparkConf
import org.apache.spark.sql.SQLContext
import org.apache.spark.streaming.kafka010.LocationStrategies.PreferConsistent
import org.apache.spark.streaming.kafka010.{ConsumerStrategies, KafkaUtils}
import org.apache.spark.streaming.{Seconds, StreamingContext}


object su {

  /**
   * Entry point: consumes tab-separated records from the Kafka topic
   * "library", sums the integer in column 2 keyed by column 0 within each
   * 2-second micro-batch, and publishes each "key,sum" result line to the
   * Kafka topic "ench1002".
   *
   * Example input line (tab-separated):
   *   201443  Jack  1  20221211  201929  3  L
   */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setMaster("local[*]").setAppName("streamingKafka")
    val streamingCon = new StreamingContext(conf, Seconds(2))

    val topic = "library"
    val group = "niit111"

    streamingCon.sparkContext.setLogLevel("error")

    // Kafka consumer configuration; offsets are managed manually via the
    // checkpoint below, hence auto-commit is disabled.
    val kafkaParams = Map[String, Object](
      "bootstrap.servers" -> "cheng:9092",
      "key.deserializer" -> classOf[StringDeserializer],
      "value.deserializer" -> classOf[StringDeserializer],
      "group.id" -> group,
      "auto.offset.reset" -> "earliest",
      "enable.auto.commit" -> (false: java.lang.Boolean)
    )

    // Checkpoint stream metadata and consumed-offset information so the
    // job can recover after a restart.
    streamingCon.checkpoint("./checkpoint")

    val topics = Array(topic)

    // Direct stream against Kafka with consistent executor placement.
    val streamRdd = KafkaUtils.createDirectStream(
      streamingCon,
      PreferConsistent,
      ConsumerStrategies.Subscribe[String, String](topics, kafkaParams)
    )

    val lines = streamRdd.map(_.value())

    lines.foreachRDD { rdd =>
      // (key = column 0, value = column 2 as Int), summed per key.
      val counts = rdd.map { line =>
        val fields = line.split("\t") // split once and reuse both columns
        (fields(0), fields(2).toInt)
      }.reduceByKey(_ + _)

      // Create ONE KafkaProducer per partition rather than one per record:
      // constructing a producer is expensive (network connections, metadata
      // fetch), so the original per-element create/close was a severe
      // throughput bug. try/finally guarantees the producer is closed even
      // if a send fails.
      counts.foreachPartition { records =>
        val props = new HashMap[String, Object]()
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "cheng:9092")
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer")
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer")
        val producer = new KafkaProducer[String, String](props)
        try {
          records.foreach { case (key, total) =>
            val str = key + "," + total
            println(str)
            producer.send(new ProducerRecord[String, String]("ench1002", str))
          }
        } finally {
          producer.close()
        }
      }
    }

    streamingCon.start()
    streamingCon.awaitTermination()
  }

}

