import org.apache.kafka.clients.consumer.ConsumerConfig
import org.apache.kafka.clients.producer.{KafkaProducer, ProducerConfig, ProducerRecord}
import org.apache.kafka.common.serialization.{StringDeserializer, StringSerializer}
import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession
import org.apache.spark.streaming.dstream.DStream
import org.apache.spark.streaming.kafka010.{ConsumerStrategies, KafkaUtils, LocationStrategies}
import org.apache.spark.streaming.{Seconds, StreamingContext}

/**
 * Real-time order statistics over Kafka + Spark Streaming.
 *
 * Consumes tab-separated order records from the "orders" topic, computes
 * several per-batch aggregations, prints them, and republishes each
 * aggregation to its own Kafka output topic.
 *
 * Input record layout (one line per order):
 *   category \t productName \t quantity \t date \t isValid
 */
object RealTimeOrderStatsApp {

  // Kafka broker address shared by the consumer and the producer.
  private val KafkaBootstrapServers = "192.168.75.132:9092"

  def main(args: Array[String]): Unit = {
    runRealTimeOrderStats()
  }

  /**
   * Builds the streaming pipeline and blocks until termination.
   *
   * Aggregations produced per 2-second micro-batch:
   *  - total Valid/Invalid counts                -> topic "order_valid_invalid_count"
   *  - Valid/Invalid counts per product          -> topic "product_order_stats"
   *  - order count per category                  -> topic "category_order_counts"
   *  - Valid/Invalid counts per category         -> topic "category_valid_invalid_counts"
   *  - Valid/Invalid counts per (category, product) -> topic "category_product_order_stats"
   */
  def runRealTimeOrderStats(): Unit = {
    // Spark Streaming configuration: 2-second micro-batches, local execution.
    val sparkConf = new SparkConf().setAppName("RealTimeOrderStats").setMaster("local[*]")
    val ssc = new StreamingContext(sparkConf, Seconds(2))

    // Kafka consumer configuration. Auto-commit is disabled so offsets are
    // driven by the direct stream rather than the consumer itself.
    val kafkaParams = Map[String, Object](
      ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG -> KafkaBootstrapServers,
      ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG -> classOf[StringDeserializer],
      ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG -> classOf[StringDeserializer],
      ConsumerConfig.GROUP_ID_CONFIG -> "niit",
      ConsumerConfig.AUTO_OFFSET_RESET_CONFIG -> "latest",
      ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG -> (false: java.lang.Boolean)
    )

    // Subscribe to the input topic.
    val topics = Array("orders")
    val stream = KafkaUtils.createDirectStream[String, String](
      ssc,
      LocationStrategies.PreferConsistent,
      ConsumerStrategies.Subscribe[String, String](topics, kafkaParams)
    )

    // Parse each record. Malformed lines (wrong field count or non-numeric
    // quantity) are silently dropped instead of crashing the stream with a
    // NumberFormatException or leaking a null sentinel into downstream ops.
    val orderDStream = stream.flatMap(record => parseOrder(record.value))

    // Total Valid/Invalid counts across all orders.
    val globalValidInvalidCounts = orderDStream
      .map(order => (order.isValid, 1))
      .reduceByKey(_ + _)
    globalValidInvalidCounts.print()

    // Valid/Invalid counts per product.
    val productValidInvalidCounts = orderDStream
      .map(order => ((order.productName, order.isValid), 1))
      .reduceByKey(_ + _)
    productValidInvalidCounts.print()

    // Order count per category.
    val categoryCounts = orderDStream
      .map(order => (order.category, 1))
      .reduceByKey(_ + _)
    categoryCounts.print()

    // Valid/Invalid counts per category.
    val categoryValidInvalidCounts = orderDStream
      .map(order => ((order.category, order.isValid), 1))
      .reduceByKey(_ + _)
    categoryValidInvalidCounts.print()

    // Valid/Invalid counts per (category, product) pair.
    val categoryProductValidInvalidCounts = orderDStream
      .map(order => (((order.category, order.productName), order.isValid), 1))
      .reduceByKey(_ + _)
    categoryProductValidInvalidCounts.print()

    // Republish each aggregation to its dedicated output topic.
    // Output message formats are tab-separated, mirroring the grouping keys,
    // e.g. "Y\t8" or "电子\tProduct4\tN\t1".
    publishToKafka(globalValidInvalidCounts, "order_valid_invalid_count") {
      case (isValid, count) => s"$isValid\t$count"
    }
    publishToKafka(productValidInvalidCounts, "product_order_stats") {
      case ((productName, isValid), count) => s"$productName\t$isValid\t$count"
    }
    publishToKafka(categoryCounts, "category_order_counts") {
      case (category, count) => s"$category\t$count"
    }
    publishToKafka(categoryValidInvalidCounts, "category_valid_invalid_counts") {
      case ((category, isValid), count) => s"$category\t$isValid\t$count"
    }
    publishToKafka(categoryProductValidInvalidCounts, "category_product_order_stats") {
      case (((category, productName), isValid), count) =>
        s"$category\t$productName\t$isValid\t$count"
    }

    // Start receiving and processing data; block until the job terminates.
    ssc.start()
    try {
      ssc.awaitTermination()
    } catch {
      case e: Exception =>
        e.printStackTrace()
        println("An error occurred. Shutting down the streaming context.")
        ssc.stop(stopSparkContext = true, stopGracefully = true)
    }
  }

  /**
   * Parses one tab-separated input line into an [[Order]].
   *
   * @param line raw record value, expected layout:
   *             category \t productName \t quantity \t date \t isValid
   * @return Some(order) when the line has exactly 5 fields and a numeric
   *         quantity; None otherwise, so bad input cannot kill the job.
   */
  private def parseOrder(line: String): Option[Order] = {
    val fields = line.split("\t")
    if (fields.length == 5)
      scala.util.Try(fields(2).toInt).toOption
        .map(qty => Order(fields(0), fields(1), qty, fields(3), fields(4)))
    else
      None
  }

  /**
   * Sends every element of the stream to the given Kafka topic.
   *
   * A producer is created and closed per partition per batch (inside the
   * executor), which keeps the producer out of the serialized closure.
   *
   * @param dstream aggregation results to publish
   * @param topic   destination Kafka topic
   * @param format  renders one element as the (tab-separated) message value
   */
  private def publishToKafka[T](dstream: DStream[T], topic: String)(format: T => String): Unit = {
    dstream.foreachRDD { rdd =>
      rdd.foreachPartition { partition =>
        val producer = createKafkaProducer()
        try {
          partition.foreach { element =>
            // Null key: messages are distributed across topic partitions.
            producer.send(new ProducerRecord[String, String](topic, null, format(element)))
          }
        } finally {
          producer.close()
        }
      }
    }
  }

  /** Creates a String/String Kafka producer pointed at the shared broker. */
  private def createKafkaProducer(): KafkaProducer[String, String] = {
    val props = new java.util.Properties()
    props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, KafkaBootstrapServers)
    props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, classOf[StringSerializer].getName)
    props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, classOf[StringSerializer].getName)
    new KafkaProducer[String, String](props)
  }

  /**
   * One parsed order record.
   *
   * @param category    order category (e.g. 电子, 图书)
   * @param productName product/order identifier (e.g. Product1)
   * @param quantity    ordered quantity
   * @param date        order date as received (kept as a raw string)
   * @param isValid     validity flag, "Y" or "N" per the observed output samples
   */
  case class Order(category: String, productName: String, quantity: Int, date: String, isValid: String)
}