package com.niit.sparkRdd
import java.util.{HashMap, Properties}

import com.alibaba.fastjson.JSON
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}
import com.niit.Utils.Kafka
import com.niit.sparkstreaming.LogdataShow.StationLog
import org.apache.kafka.clients.producer.{KafkaProducer, ProducerConfig, ProducerRecord}
import org.apache.kafka.common.serialization.StringSerializer
import org.apache.spark.streaming.{Seconds, StreamingContext}

import scala.collection.mutable.ListBuffer

/**
 * @Author:xuyuanhong
 * @Date:Created in 2022/12/5 7:59
 */
object Rdd {

  /** Kafka bootstrap servers shared by every producer in this job. */
  private val BootstrapServers = "niit01:9092"

  /**
   * Builds the common Kafka producer configuration: the broker list plus
   * String serializers for both key and value.
   */
  private def producerConfig(): HashMap[String, Object] = {
    val params = new HashMap[String, Object]()
    params.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, BootstrapServers)
    // classOf[...].getName yields the same FQCN string the original hard-coded.
    params.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, classOf[StringSerializer].getName)
    params.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, classOf[StringSerializer].getName)
    params
  }

  /**
   * Sends every (key, count) pair of `counts` to the Kafka `topic` as a
   * "key,count" string.
   *
   * The original code created (and never closed) one KafkaProducer per
   * RECORD inside `foreach`; here a single producer is created per
   * partition and closed in `finally`, which also flushes buffered records
   * so none are silently dropped.
   */
  private def sendCounts[K](counts: RDD[(K, Int)], topic: String): Unit = {
    counts.foreachPartition { records =>
      val producer = new KafkaProducer[String, String](producerConfig())
      try {
        records.foreach { case (key, count) =>
          val result = key.toString + "," + count.toString
          println("result=======" + result)
          producer.send(new ProducerRecord[String, String](topic, result))
        }
      } finally {
        producer.close() // flushes pending sends before releasing resources
      }
    }
  }

  def main(args: Array[String]): Unit = {
    // Single SparkConf/SparkContext. (The original built two SparkConf
    // instances — one dead — and leaked an unused driver-side producer.)
    val sparkConf = new SparkConf().setMaster("local[*]").setAppName("sparkRDD")
    val sc: SparkContext = new SparkContext(sparkConf)
    sc.setLogLevel("error")

    try {
      // NOTE(review): hard-coded local path — consider taking it from args.
      val line: RDD[String] = sc.textFile("C:\\Users\\huawei\\Desktop\\2.json")

      // Parse each JSON line into a StationLog once and derive all three
      // keyed views from it (the original re-parsed the file three times
      // via near-identical mapPartitions/ListBuffer loops).
      val logs: RDD[StationLog] =
        line.map(json => JSON.parseObject(json, classOf[StationLog]))

      val byStatus: RDD[(String, Int)]              = logs.map(log => (log.Status, 1))
      val byStatusSex: RDD[((String, String), Int)] = logs.map(log => ((log.Status, log.Sex), 1))
      val byIdSex: RDD[((String, String), Int)]     = logs.map(log => ((log.ID, log.Sex), 1))

      // Debug output preserved from the original job.
      byStatusSex.foreach(println)
      byIdSex.foreach(println)

      // Reduce each view to counts and publish to its topic.
      sendCounts(byStatus.reduceByKey(_ + _), "test")
      sendCounts(byStatusSex.reduceByKey(_ + _), "sta1")
      sendCounts(byIdSex.reduceByKey(_ + _), "class")
    } finally {
      sc.stop() // release Spark resources even if the job fails
    }
  }
}
