package com.xyh.sparkRdd

import java.util.{HashMap, Properties}

import com.alibaba.fastjson.JSON
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}
import com.niit.Utils.Kafka
import com.niit.sparkstreaming.LogdataShow.StationLog
import org.apache.kafka.clients.producer.{KafkaProducer, ProducerConfig, ProducerRecord}
import org.apache.kafka.common.serialization.StringSerializer

import scala.collection.mutable.ListBuffer

/**
 * @author Linava-吕梦歌
 * @date 2023/6/18 19:19
 *
 */
/**
 * Reads a file of JSON log lines, counts records grouped by (Term, Sex),
 * prints each aggregated result and publishes it to the Kafka topic "G".
 *
 * Fixes vs. the previous version:
 *  - one KafkaProducer per PARTITION (was: one per record) via foreachPartition;
 *  - producer is closed in a finally block so buffered sends are flushed
 *    (KafkaProducer.send is asynchronous — without close()/flush() records
 *    can be silently dropped) and sockets/threads are released;
 *  - SparkContext is stopped when the job finishes;
 *  - serializer classes referenced via classOf instead of hard-coded strings.
 */
object xqRdd {
  def main(args: Array[String]): Unit = {
    val sparkConf = new SparkConf().setMaster("local[*]").setAppName("RDD")
    // Create the Spark context (driver-side connection object).
    val sc: SparkContext = new SparkContext(sparkConf)
    try {
      val line: RDD[String] = sc.textFile("E:\\IntelliJ IDEA programs\\17code\\niit\\input\\1.json")

      // Parse each JSON line into a StationLog and key it by (Term, Sex).
      // iterator.map keeps the partition lazy — no intermediate buffer needed.
      val termSexPairs: RDD[((String, String), Int)] = line.mapPartitions { jsonIter =>
        jsonIter.map { json =>
          val log: StationLog = JSON.parseObject(json, classOf[StationLog])
          ((log.Term, log.Sex), 1)
        }
      }
      termSexPairs.foreach(println) // debug: show the raw (key, 1) pairs

      // Aggregate counts per (Term, Sex) key.
      val termSexCounts: RDD[((String, String), Int)] = termSexPairs.reduceByKey(_ + _)

      // Publish each aggregated count to Kafka. One producer per partition:
      // creating a producer per record is very expensive (TCP connect,
      // metadata fetch) and was the main defect of the old implementation.
      termSexCounts.foreachPartition { partitionIter =>
        val kafkaParams = new HashMap[String, Object]()
        kafkaParams.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "niit01:9092")
        kafkaParams.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, classOf[StringSerializer].getName)
        kafkaParams.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, classOf[StringSerializer].getName)
        val producer = new KafkaProducer[String, String](kafkaParams)
        try {
          for ((key, count) <- partitionIter) {
            // Message format unchanged: "(Term,Sex),count"
            val result = key.toString + "," + count.toString
            println("result=======" + result)
            producer.send(new ProducerRecord[String, String]("G", result))
          }
        } finally {
          producer.close() // flushes pending async sends, then releases resources
        }
      }
    } finally {
      sc.stop() // always release the Spark context, even on failure
    }
  }
}
