package spark

import java.sql.{PreparedStatement, ResultSet}
import java.text.SimpleDateFormat
import java.util.{Calendar, Date}
import util.control.Breaks._

import com.google.gson.Gson
import org.apache.spark.SparkConf
import org.apache.spark.sql._
import org.apache.spark.sql.functions._
import org.apache.spark.sql.types.DoubleType
import org.apache.spark.streaming.kafka010.{ConsumerStrategies, KafkaUtils, LocationStrategies}
import org.apache.spark.streaming.{Seconds, StreamingContext}

import scala.collection.mutable

object test {

  /**
   * Spark Streaming entry point.
   *
   * Consumes JSON game records from the Kafka topic "test" in 5-second
   * micro-batches and, for each non-empty batch, recomputes the average
   * review count and positive-review ratio for each of the 12 full months
   * preceding the current one, persisting the results into MySQL.
   */
  def main(args: Array[String]): Unit = {
    // local[*] = use all available CPU cores (local mode);
    // production would use yarn / standalone cluster mode instead.
    val conf = new SparkConf().setMaster("local[*]").setAppName("kafka")

    // 5-second micro-batch interval.
    val ssc = new StreamingContext(conf, Seconds(5))

    // Kafka consumer configuration.
    val topicsSet = Array("test")
    val kafkaParams = mutable.HashMap[String, String]()
    kafkaParams.put("bootstrap.servers", "192.168.2.12:9092")
    kafkaParams.put("group.id", "test-consumer-group")
    kafkaParams.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer")
    kafkaParams.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer")
    val stream = KafkaUtils.createDirectStream[String, String](
      ssc,
      LocationStrategies.PreferConsistent,
      ConsumerStrategies.Subscribe[String, String](topicsSet, kafkaParams)
    )

    // Parse each JSON payload into a KafkaMessage, then process each batch
    // as a DataFrame.
    stream.map(record => handleMessage2CaseClass(record.value())).foreachRDD(rdd => {
      val spark = SparkSession.builder().config(rdd.sparkContext.getConf).getOrCreate()
      val df = spark.createDataFrame(rdd)

      if (!df.rdd.isEmpty) { // begin processing

        /** Monthly aggregation */
        // Clear previous results before re-inserting this batch's averages.
        // FIX: the original executed "DELETE FROM year_avg" here, but the
        // inserts below target month_avg — stale monthly rows were never
        // cleared and the yearly table was wiped instead.
        MySQLConnect.conn().createStatement().execute("DELETE FROM month_avg")

        // Rows with a release date and non-zero review statistics.
        val reviewed = df
          .filter("release_date is not null and game_review_count != 0 and positive_review_ratio != 0")
          .select("game_review_count", "positive_review_ratio")

        val dayFormat = new SimpleDateFormat("yyyyMMdd")
        val cursor = Calendar.getInstance()

        // Iterate over the 12 full months preceding the current one (the
        // current, incomplete month is excluded). The original achieved the
        // same net window via 13 iterations with a skipped first pass AND a
        // compensating "+1 month" shift when deriving the range; this loop
        // is the direct equivalent.
        for (_ <- 1 to 12) {
          cursor.add(Calendar.MONTH, -1) // step back one month per iteration

          // First day of the cursor month. Build a cleared calendar with the
          // day pinned to 1 BEFORE materializing the time: the original used
          // Calendar.getInstance() still carrying *today's* day-of-month, so
          // on the 29th-31st a lenient set(MONTH, ...) could overflow a
          // shorter target month and roll the date into the next month.
          val monthCal = Calendar.getInstance()
          monthCal.clear()
          monthCal.set(cursor.get(Calendar.YEAR), cursor.get(Calendar.MONTH), 1)
          val firstday = dayFormat.format(monthCal.getTime)

          // Last day of the cursor month = "day 0" of the following month
          // (lenient Calendar resolves day 0 to the previous month's end).
          monthCal.add(Calendar.MONTH, 1)
          monthCal.set(Calendar.DAY_OF_MONTH, 0)
          val lastday = dayFormat.format(monthCal.getTime)

          // avg(review count), avg(positive ratio) for releases in the month.
          val monthReview = reviewed
            .filter(s"release_date >= $firstday and release_date <= $lastday")
            .agg(avg("game_review_count"), avg("positive_review_ratio"))

          val monthStr = firstday + "-" + lastday
          monthReview.foreach(row => {
            // NOTE(review): this closure runs on executors; each row opens
            // its own connection via MySQLConnect — confirm the helper pools
            // or reuses connections. The statement is now closed explicitly
            // (the original leaked every PreparedStatement).
            val pstmt = MySQLConnect.conn().prepareStatement("INSERT INTO month_avg VALUE(?,?,?)")
            try {
              pstmt.setString(1, monthStr)
              pstmt.setDouble(2, row(0).asInstanceOf[Double])
              pstmt.setDouble(3, row(1).asInstanceOf[Double])
              pstmt.execute()
            } finally {
              pstmt.close()
            }
          })
        }

      } // end of processing
    })

    ssc.start()
    ssc.awaitTermination()
  }

  /**
   * Deserializes one JSON message from Kafka into a [[KafkaMessage]] via Gson.
   *
   * @param jsonStr raw JSON payload of a Kafka record
   * @return the parsed message (Gson returns null for a null/empty input)
   */
  def handleMessage2CaseClass(jsonStr: String): KafkaMessage = {
    val gson = new Gson()
    gson.fromJson(jsonStr, classOf[KafkaMessage])
  }

  /** One game record as published on the Kafka topic. */
  case class KafkaMessage(game_name: String, game_price: Int, price_discount: Double, release_date: String, game_review_count: Int, positive_review_ratio: Double, game_type: String, game_about: String, img_src: String)

}