package com.niit.sparkStreaming

import org.apache.kafka.clients.producer.{KafkaProducer, ProducerConfig, ProducerRecord}
import org.apache.kafka.common.serialization.StringDeserializer
import org.apache.spark.SparkConf
import org.apache.spark.streaming.kafka010.{ConsumerStrategies, KafkaUtils}
import org.apache.spark.streaming.kafka010.LocationStrategies.PreferConsistent
import org.apache.spark.streaming.{Seconds, StreamingContext}

import java.util
/**
 * Spark Streaming job that consumes tab-separated student records from the
 * Kafka topic "stuInfo" and, every 10-second batch, publishes four count
 * aggregations back to Kafka:
 *
 *   - counts by sex                  -> topic "pro10"
 *   - counts by semester             -> topic "pro9"
 *   - counts by (enrollment, sex)    -> topic "pro14"
 *   - counts by enrollment status    -> topic "pro13"
 *
 * Record layout (inferred from field usage — TODO confirm against producer):
 * field 2 = sex ("1"/"2"), field 5 = semester, field 6 = enrollment ("L"/"A").
 * Each outgoing message has the form "key,count".
 */
object sparkStreaming {

  /** Kafka broker address shared by the consumer and all producers. */
  private val Brokers = "niit01:9092"

  /**
   * Builds the configuration for a String/String producer.
   * Extracted because the original duplicated this map four times.
   */
  private def producerProps(): util.HashMap[String, Object] = {
    val props = new util.HashMap[String, Object]()
    props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, Brokers)
    props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG,
      "org.apache.kafka.common.serialization.StringSerializer")
    props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG,
      "org.apache.kafka.common.serialization.StringSerializer")
    props
  }

  /**
   * Sends every (key, count) pair of one RDD partition to `topic` as the
   * message "key,count". Creates ONE producer per partition — the original
   * created (and closed) a producer for every individual record, which is
   * very expensive — and closes it in `finally` so it is never leaked.
   */
  private def sendCounts(topic: String)(partition: Iterator[(String, Int)]): Unit = {
    val producer = new KafkaProducer[String, String](producerProps())
    try {
      partition.foreach { case (key, count) =>
        producer.send(new ProducerRecord[String, String](topic, key + "," + count))
      }
    } finally {
      producer.close()
    }
  }

  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setMaster("local[*]").setAppName("sparkStreaming")
    // 10-second micro-batches.
    val ssc = new StreamingContext(conf, Seconds(10))

    val topic = "stuInfo"
    val group = "niit0304"
    val kafkaParams = Map[String, Object](
      "bootstrap.servers" -> Brokers,
      "key.deserializer" -> classOf[StringDeserializer],
      "value.deserializer" -> classOf[StringDeserializer],
      "group.id" -> group,
      // Start from the earliest offset when no committed offset exists.
      "auto.offset.reset" -> "earliest",
      // Offsets are managed by Spark, not auto-committed by the consumer.
      "enable.auto.commit" -> (false: java.lang.Boolean)
    )

    // NOTE(review): "chekpoint" is a typo, but the path is kept as-is so an
    // existing checkpoint directory is not silently abandoned.
    ssc.checkpoint("./chekpoint")

    val linesStream = KafkaUtils.createDirectStream(
      ssc,
      PreferConsistent,
      ConsumerStrategies.Subscribe[String, String](Array(topic), kafkaParams)
    )

    // Raw tab-separated record values.
    val line = linesStream.map(_.value())

    // 1) Counts by sex -> "pro10".
    line.foreachRDD { rdd =>
      val sexCounts = rdd.map { record =>
        val sex = record.split("\t")(2)
        // Unrecognised codes collapse to "" (same behaviour as the original
        // if/else chain, which left the key empty for anything not 1 or 2).
        val key = if (sex == "1" || sex == "2") sex else ""
        (key, 1)
      }.reduceByKey(_ + _)
      sexCounts.foreach(println(_))
      sexCounts.foreachPartition(sendCounts("pro10"))
    }

    // 2) Counts by semester -> "pro9".
    line.foreachRDD { rdd =>
      val semCounts = rdd
        .map(record => (record.split("\t")(5), 1))
        .reduceByKey(_ + _)
      semCounts.foreach(println(_))
      semCounts.foreachPartition(sendCounts("pro9"))
    }

    // 3) Counts by (enrollment, sex) -> "pro14".
    line.foreachRDD { rdd =>
      val enroSexCounts = rdd.map { record =>
        // Split once instead of twice per record.
        val fields = record.split("\t")
        val sex = fields(2)
        val enro = fields(6)
        // Original four-branch if/else assigned the same value in every
        // branch; collapsed to the equivalent single condition. Records not
        // matching any branch keep the original sentinel key " , ".
        val key =
          if ((enro == "L" || enro == "A") && (sex == "1" || sex == "2"))
            enro + "," + sex
          else " " + "," + " "
        (key, 1)
      }.reduceByKey(_ + _)
      enroSexCounts.foreach(println(_))
      enroSexCounts.foreachPartition(sendCounts("pro14"))
    }

    // 4) Counts by enrollment status -> "pro13".
    line.foreachRDD { rdd =>
      val enroCounts = rdd.map { record =>
        val enro = record.split("\t")(6)
        // Unrecognised statuses keep the original sentinel key " ".
        val key = if (enro == "L" || enro == "A") enro else " "
        (key, 1)
      }.reduceByKey(_ + _)
      enroCounts.foreach(println(_))
      enroCounts.foreachPartition(sendCounts("pro13"))
    }

    ssc.start()
    ssc.awaitTermination()
  }
}
