package com.memer

import org.apache.kafka.clients.producer.{KafkaProducer, ProducerConfig, ProducerRecord}
import org.apache.kafka.common.serialization.StringDeserializer
import org.apache.spark.SparkConf
import org.apache.spark.sql.SQLContext
import org.apache.spark.streaming.kafka010.{ConsumerStrategies, KafkaUtils}
import org.apache.spark.streaming.kafka010.LocationStrategies.PreferConsistent
import org.apache.spark.streaming.{Seconds, StreamingContext}

import java.util.{HashMap, Properties}

object Four {

  /**
   * Entry point for a Spark Streaming job that:
   *  1. consumes tab-separated lines from the Kafka topic "library",
   *  2. counts occurrences per "field3,field2" key within each 2-second batch,
   *  3. publishes each "key,count" result to the Kafka topic "t222222".
   */
  def main(args: Array[String]): Unit = {

    val conf = new SparkConf().setMaster("local[*]").setAppName("kafkaDstream")
    val ssc = new StreamingContext(conf, Seconds(2))

    ssc.sparkContext.setLogLevel("error")

    // Kafka consumer configuration.
    // NOTE(review): the consumer reads from "cheng:9092" while the producer
    // below writes to "niit01:9092" — confirm both broker addresses are intended.
    val topic = "library"
    val group = "niit"
    val kafkaParams = Map[String, Object](
      "bootstrap.servers" -> "cheng:9092",
      "key.deserializer" -> classOf[StringDeserializer],
      "value.deserializer" -> classOf[StringDeserializer],
      "group.id" -> group,
      // Start from the earliest available offset when the group has no committed offset.
      "auto.offset.reset" -> "earliest",
      // Offsets are not auto-committed by the consumer.
      "enable.auto.commit" -> (false: java.lang.Boolean)
    )

    ssc.checkpoint("./checkpoint")

    // Direct stream from Kafka; PreferConsistent distributes partitions
    // evenly across the available executors.
    val linesStream = KafkaUtils.createDirectStream(
      ssc,
      PreferConsistent,
      ConsumerStrategies.Subscribe[String, String](Array(topic), kafkaParams)
    )

    val line = linesStream.map(_.value())

    line.foreachRDD { rdd =>

      // Key each record by "field3,field2" and count occurrences in this batch.
      // FIX: split each line once instead of twice.
      val result = rdd.map { record =>
        val fields = record.split("\t")
        (fields(3) + "," + fields(2), 1)
      }.reduceByKey(_ + _)

      // Debug output; runs on the executors, so it appears in executor logs.
      result.foreach(println(_))

      // FIX: the original created and closed a new KafkaProducer for EVERY
      // output record (one TCP connection + metadata fetch per element).
      // Create one producer per partition, flush, and always close it.
      result.foreachPartition { records =>
        val producerProps = new HashMap[String, Object]()
        producerProps.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "niit01:9092")
        producerProps.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer")
        producerProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer")

        val producer = new KafkaProducer[String, String](producerProps)
        try {
          records.foreach { case (num, sum) =>
            val res = num + "," + sum
            producer.send(new ProducerRecord[String, String]("t222222", res))
          }
          // Ensure all buffered sends are delivered before the producer closes.
          producer.flush()
        } finally {
          producer.close()
        }
      }
    }

    // Start the streaming computation and block until it is stopped.
    ssc.start()
    ssc.awaitTermination()
  }
}
