package read.niit.service


import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, SaveMode, SparkSession}
import org.apache.spark.streaming.dstream.DStream
import read.niit.bean.Reader
import read.niit.util.SparkUtil

class DataReaderService {
  val spark = SparkUtil.takeSpark()
  import spark.implicits._
  import org.apache.spark.sql.functions._

  /** Entry point: runs every streaming analysis over the incoming reader stream. */
  def dataAnalysis(reader: DStream[Reader]): Unit = {
    realTimeByOwnDefinedTime(reader)
  }

  /**
   * Real-time statistics: books borrowed by students of different grades on a
   * given day (default "4.23", i.e. April 23rd).
   *
   * Matching records are printed to stdout per micro-batch and appended to the
   * MySQL table `definedTimeTable` as rows of
   * (date_id, grade_id, read_subject_id).
   *
   * @param reader     stream of borrow records
   * @param targetDate day token to match against the part of `date_id` after
   *                   the first underscore (defaults to "4.23" for backward
   *                   compatibility)
   */
  private def realTimeByOwnDefinedTime(reader: DStream[Reader],
                                       targetDate: String = "4.23"): Unit = {
    // flatMap + Option instead of map-to-null + filter: no nulls flow through
    // the pipeline. lift(1) also guards against date_id values without an
    // underscore, which previously threw ArrayIndexOutOfBoundsException.
    val mapDS: DStream[(String, (String, String))] = reader.flatMap { data =>
      val dayPart = data.date_id.split("_").lift(1)
      if (dayPart.contains(targetDate))
        Some((data.date_id, (data.grade_id, data.read_subject_id)))
      else
        None
    }

    mapDS.foreachRDD { batch =>
      println("---------实时统计4.23日当天不同年级学生借阅书籍情况----------")
      // collect() is acceptable here: the batch is already filtered down to a
      // single day's matches. NOTE(review): for large batches consider take(n).
      batch.collect().foreach(println)

      // Flatten the nested tuple into columns for the DataFrame.
      val rows: RDD[(String, String, String)] = batch.map {
        case (dateId, (gradeId, subjectId)) => (dateId, gradeId, subjectId)
      }
      val resultDF: DataFrame = rows.toDF("date_id", "grade_id", "read_subject_id")

      // NOTE(review): JDBC credentials are hard-coded; move them to external
      // configuration rather than source control.
      resultDF.write
        .format("jdbc")
        .option("url", "jdbc:mysql://node1:3306/BD2?useUnicode=true&characterEncoding=utf8")
        .option("driver", "com.mysql.jdbc.Driver")
        .option("user", "root")
        .option("password", "Niit@123")
        .option("dbtable", "definedTimeTable") // target table (created automatically if absent)
        .mode(SaveMode.Append) // append mode: rows accumulate across micro-batches
        .save()
    }
  }
}
