package com.niit.sparkAnalyze.sparkStreaming
object orderNum {
  // Requirement (3): per order category, the quantity of valid vs. invalid orders.
  // Input tuple layout (see proInfo): (ordercategory, prodname, num, date, invali)
  //  case class proInfo(ordercategory:String,prodname:String,num:Int,date:Int,invali:String)

  /**
   * Aggregates the order quantity (`num`, field _3) per (category, validity) pair
   * and renders the result as a comma-joined string of
   * `[类别:<category>,是否有效:<有效|无效>,状态数量:<total>]` entries.
   *
   * Fix vs. previous version: the old code counted records per (category, status)
   * and multiplied by a per-category `num` taken from `collectAsMap()`. With
   * duplicate categories that map kept an arbitrary (partition-order dependent)
   * record's `num`, so the output was nondeterministic. Summing `num` per
   * (category, status) in a single `reduceByKey` is deterministic, avoids the
   * driver-side `collectAsMap`, and matches the stated requirement.
   *
   * @param rdd records shaped (category, prodname, num, date, isValid) where
   *            isValid == "Y" marks a valid order
   * @return the formatted summary string (empty when the RDD is empty)
   */
  def orderNum(rdd: org.apache.spark.rdd.RDD[(String, String, Int, String, String)]): String = {
    // One pass: key by (category, validity label), sum the order quantity.
    // Long accumulator guards against Int overflow on large batches.
    val statusTotals = rdd
      .map { case (category, _, num, _, isValid) =>
        val status = if (isValid == "Y") "有效" else "无效"
        ((category, status), num.toLong)
      }
      .reduceByKey(_ + _)

    val res_str = statusTotals
      .map { case ((category, status), total) =>
        s"""[类别:$category,是否有效:$status,状态数量:$total]"""
      }
      .collect()
      .mkString(",")

    // Only log non-empty micro-batches, same as before.
    if (!rdd.isEmpty()) {
      println("(sparkStreaming)" + res_str)
    }
    res_str
  }
}
