package com.niit.spark.sparkRDD

import com.alibaba.fastjson.JSON
import com.niit.data.data
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, SaveMode, SparkSession}
import org.apache.spark.{SparkConf, SparkContext}

import scala.collection.mutable.ListBuffer
// Row type for the `valueCorrect` table: order count per correctness flag.
case class ValueCorrect(is_correct:String,num:Int)
// Row type for the `valueCategory` table: order count per category.
case class ValueCategory(category:String,num :Int)
// Row type for the `allCategory` table: order count per (category, correctness) pair.
case class ALLCategory(category:String, is_correct: String,num:Int)
/**
 * @author 杨铭
 *         2022/11/19,10:11
 */
object rdd {
  // JDBC connection settings for the MySQL sink.
  // NOTE(review): "spark.cores" is unused (master is set directly on SparkConf);
  // kept for backward compatibility since `config` is public.
  val config = Map(
    "spark.cores" -> "local[*]",
    "mysql.url" -> "jdbc:mysql://localhost:3306/niitspark?useUnicode=true&characterEncoding=UTF-8&serverTimezone=Asia/Shanghai",
    "mysql.driver" -> "com.mysql.cj.jdbc.Driver",
    "mysql.user" -> "root",
    "mysql.password" -> "mysqlym123"
  )

  /** Appends `df` to the given MySQL table using the shared JDBC settings. */
  private def writeToMysql(df: DataFrame, table: String): Unit = {
    df.write
      .format("jdbc")
      .mode(SaveMode.Append)
      .option("driver", config("mysql.driver"))
      .option("dbtable", table)
      .option("url", config("mysql.url"))
      .option("user", config("mysql.user"))
      .option("password", config("mysql.password"))
      .save()
  }

  /**
   * Reads order records (one JSON object per line) from data/order.json,
   * aggregates counts by correctness flag, by category, and by the
   * (category, correctness) pair, and appends the three result sets to MySQL.
   */
  def main(args: Array[String]): Unit = {
    val sparkConf = new SparkConf().setMaster("local[*]").setAppName("sparkRDD")
    // Create the Spark context (connection object).
    val sc: SparkContext = new SparkContext(sparkConf)
    val line: RDD[String] = sc.textFile("data/order.json")

    // Parse every JSON line into a `data` object exactly ONCE and cache it.
    // The original job parsed the same input three separate times (one
    // mapPartitions pass per aggregation).
    val orders: RDD[data] = line
      .map(json => JSON.parseObject(json, classOf[data]))
      .cache()

    // Count orders per correctness flag.
    val valueCorrectRDD: RDD[(Char, Int)] =
      orders.map(o => (o.isCorrect, 1)).reduceByKey(_ + _)
    // Count orders per category.
    val valueCategoryRDD: RDD[(String, Int)] =
      orders.map(o => (o.orderCategory, 1)).reduceByKey(_ + _)
    // Count orders per (category, correctness) pair.
    val allCategoryRDD: RDD[((String, Char), Int)] =
      orders.map(o => ((o.orderCategory, o.isCorrect), 1)).reduceByKey(_ + _)

    // getOrCreate reuses the SparkContext created above rather than starting
    // a second one; stopping the session at the end also stops that context.
    val sparkSession: SparkSession = SparkSession.builder().config(sparkConf).getOrCreate()
    import sparkSession.implicits._

    val valueCorrectDF: DataFrame = valueCorrectRDD
      .map { case (correct, n) => ValueCorrect(correct.toString, n) }
      .toDF()
    writeToMysql(valueCorrectDF, "valueCorrect")
    println("111111111111")

    val valueCategoryDF: DataFrame = valueCategoryRDD
      .map { case (category, n) => ValueCategory(category, n) }
      .toDF()
    writeToMysql(valueCategoryDF, "valueCategory")
    println("2222222")

    val allCategoryDF: DataFrame = allCategoryRDD
      .map { case ((category, correct), n) => ALLCategory(category, correct.toString, n) }
      .toDF()
    writeToMysql(allCategoryDF, "allCategory")
    println("33333333333")

    sparkSession.stop()
  }
}
