package com.niit.sparkAnalyze.sparkRDD

import org.apache.spark.rdd.RDD

object EachCategoryValidNum {

  /**
   * Aggregates, per category, the total order quantity of valid and invalid
   * records, and renders the result as a single comma-joined string.
   *
   * Input tuple layout (inferred from usage — confirm against producer):
   * (_1 category, _2 unused here, _3 order quantity, _4 unused here,
   *  _5 validity flag, "Y" = valid).
   *
   * @param rdd records to aggregate
   * @return "RDD is empty" for an empty input; otherwise entries of the form
   *         "[类别:c,有效数量:v,无效数量:i]" joined by ",", sorted by category.
   */
  def eachCategoryValidNum(rdd: RDD[(String, String, Int, String, String)]): String = {
    // Expression-oriented: no early `return` (nonlocal-return hazard in Scala).
    if (rdd.isEmpty()) {
      "RDD is empty"
    } else {
      // One distributed pass: map each record to (category, (validQty, invalidQty))
      // and sum both components per category. This replaces the previous
      // broken approach, which never reduced by key (one output entry per
      // record, not per category) and multiplied a 0/1 flag by an arbitrary
      // "last seen" order quantity taken from collectAsMap().
      val perCategory = rdd
        .map { case (category, _, orderNum, _, isValid) =>
          if (isValid == "Y") (category, (orderNum.toLong, 0L))
          else (category, (0L, orderNum.toLong))
        }
        .reduceByKey { case ((v1, i1), (v2, i2)) => (v1 + v2, i1 + i2) }

      // Sort by category so the output string is deterministic across runs
      // (previously it depended on partition collection order).
      val res_str = perCategory
        .sortBy(_._1)
        .map { case (category, (validQty, invalidQty)) =>
          s"""[类别:$category,有效数量:$validQty,无效数量:$invalidQty]"""
        }
        .collect()
        .mkString(",") // join each category's stats into one string

      println("(sparkRDD)" + res_str)
      res_str // final string result
    }
  }
}
