package com.niit.sparkAnalyze.sparkSQL

import org.apache.spark.sql.SparkSession

object sparkSql {

  /**
   * Aggregates order quantities from an RDD of order records via Spark SQL.
   *
   * Each record is (orderCategory, productName, productNumber, orderDate, isValid),
   * where isValid is the flag "Y" (valid) or "N" (invalid).
   *
   * @param rdd the input order records
   * @return a summary string with the total valid and invalid product counts,
   *         or "RDD is empty" when the RDD contains no records
   */
  def sparkSql(rdd: org.apache.spark.rdd.RDD[(String, String, Int, String, String)]): String = {
    if (rdd.isEmpty()) {
      "RDD is empty"
    } else {
      val spark = SparkSession.builder.getOrCreate()
      import spark.implicits._

      // The tuples already have the desired column shape, so toDF only needs
      // the column names — no intermediate map that rebuilds the same tuple.
      val ordersDataFrame =
        rdd.toDF("orderCategory", "productName", "productNumber", "orderDate", "isValid")
      ordersDataFrame.createOrReplaceTempView("ordersTable")

      // Sums productNumber for rows whose isValid equals the given flag.
      // SUM over an integer column yields BIGINT (Long) and is SQL NULL when no
      // row matches; check isNullAt(0) explicitly — getAs[Long] would silently
      // unbox NULL to 0L before an Option wrap could detect it.
      // The flag is only ever the local literal "Y" or "N", so the
      // interpolation below is not exposed to untrusted input.
      def sumByValidity(flag: String): Long =
        spark
          .sql(s"SELECT SUM(productNumber) FROM ordersTable WHERE isValid = '$flag'")
          .collect()
          .headOption
          .collect { case row if !row.isNullAt(0) => row.getLong(0) }
          .getOrElse(0L)

      val resultYNum = sumByValidity("Y")
      val resultNNum = sumByValidity("N")

      println(s"(sparkSQL)有效订单总数: $resultYNum, 无效订单总数: $resultNNum")
      s"有效订单总数: $resultYNum, 无效订单总数: $resultNNum"
    }
  }
}
