package com.niit.sparkAnalyze.sparkStreaming
object ProductNum {

  /**
   * Requirement (2): per streaming batch, report the count of orders per
   * validity status as a formatted string.
   *
   * Input tuple layout (inferred from usage; TODO confirm against producer):
   * `(_1 orderCategory, _2 prodName, _3 num, _4 date, _5 isValid)`.
   *
   * @param rdd batch of order records; `_5` is the validity flag ("Y"/"N")
   * @return comma-joined segments of the form `[状态:<status>,状态数量:<value>]`
   */
  def productNum(rdd: org.apache.spark.rdd.RDD[(String, String, Int, String, String)]): String = {

    // Normalize the validity flag: keep "Y" / "N" as-is, map anything else
    // (unexpected or missing flag) to "0", then count records per status.
    val statusCounts = rdd.map { record =>
      val isValid = record._5
      val status = if (isValid == "Y" || isValid == "N") isValid else "0"
      (status, 1)
    }.reduceByKey(_ + _)

    // NOTE(review): collectAsMap keeps only ONE arbitrary `num` per status key
    // (last writer wins), so `count * orderNumValue` below multiplies the
    // status count by a single order's num, NOT a per-status total. Preserved
    // as-is to keep behavior unchanged — confirm intended semantics; a
    // per-status sum would be `reduceByKey(_ + _)` before collecting.
    val orderNumValueMap = rdd.map(record => record._5 -> record._3.toLong).collectAsMap()

    // Render each (status, count) pair; statuses absent from the map fall
    // back to 0L, yielding a zero product for that segment.
    val resStr = statusCounts.map { case (status, count) =>
      val orderNumValue = orderNumValueMap.getOrElse(status, 0L)
      s"""[状态:$status,状态数量:${count * orderNumValue}]"""
    }.collect().mkString(",")

    // Only log non-empty batches; isEmpty() triggers a small extra Spark job.
    if (!rdd.isEmpty())
      println("(sparkStreaming)" + resStr)
    resStr
  }
}
