package com.oscar.recommender

import org.apache.spark.SparkConf
import org.apache.spark.sql.{DataFrame, SparkSession}

// Case classes modeling the two CSV input files.
/**
  * Book CSV columns:   bookid,title,author,img_url,bookClass,bookPrice,pubtime,detail,codeCount
  * Rating CSV columns: userId,bookid,rating,timestamp
  */
/** One record from the books CSV — only the columns the loader keeps. */
case class Book(
  bookId: Int,
  title: String,
  author: String,
  imageUrl: String,
  bookClass: String,
  pubtime: String
)
/** One record from the ratings CSV: a user's score for a book at a Unix-seconds timestamp. */
case class Rating(
  userId: Int,
  bookId: Int,
  score: Double,
  timestamp: Int
)

/** JDBC connection settings for the target MySQL instance. */
case class MySQLConfig(
  url: String,
  user: String,
  password: String
)

object DataLoader {
  // Input CSV locations (absolute Windows paths; adjust per environment).
  val Book_DATA_PATH = "D:\\IdeaProjects\\bookrecommendsys\\calculation\\DataLoader\\src\\main\\resources\\books.csv"
  val RATING_DATA_PATH = "D:\\IdeaProjects\\bookrecommendsys\\calculation\\DataLoader\\src\\main\\resources\\ratings.csv"
  // Target MySQL table names.
  val MYSQL_BOOKS = "Book"
  val MYSQL_RATING = "Rating"

  /**
    * Entry point: loads the book and rating CSV files into DataFrames and
    * overwrites the corresponding MySQL tables.
    */
  def main(args: Array[String]): Unit = {
    // Runtime configuration: Spark master plus MySQL connection settings.
    val config = Map(
      "spark.cores" -> "local[*]",
      "mysql.url" -> "jdbc:mysql://localhost:3306/recommender",
      "mysql.user" -> "root",
      "mysql.password" -> "123456"
    )

    val sparkConf = new SparkConf().setMaster(config("spark.cores")).setAppName("DataLoader")
    val spark = SparkSession.builder().config(sparkConf).getOrCreate()
    import spark.implicits._

    // Load ratings; each line is "userId,bookid,rating,timestamp".
    // NOTE(review): assumes no header row and well-formed lines — a malformed
    // line fails the whole job with NumberFormatException. Also assumes no
    // field contains an embedded comma, since split(",") is not a CSV parser.
    val ratingRDD = spark.sparkContext.textFile(RATING_DATA_PATH)
    val ratingDF = ratingRDD.map { line =>
      val attr = line.split(",")
      Rating(attr(0).toInt, attr(1).toInt, attr(2).toDouble, attr(3).toInt)
    }.toDF()
    ratingDF.show(truncate = false) // first 20 rows, columns untruncated

    // Load books. Documented columns:
    //   0:bookid 1:title 2:author 3:img_url 4:bookClass 5:bookPrice 6:pubtime 7:detail 8:codeCount
    // FIX: the original read attr(2)..attr(5) and attr(7), which per that
    // schema shifted every field by one (title<-author, ..., pubtime<-detail).
    // Indexes below match the documented schema — verify against the real CSV.
    val bookRDD = spark.sparkContext.textFile(Book_DATA_PATH)
    val bookDF = bookRDD.map { line =>
      val attr = line.split(",")
      Book(attr(0).toInt, attr(1).trim, attr(2).trim, attr(3).trim, attr(4).trim, attr(6).trim)
    }.toDF()
    bookDF.show()

    // Implicit connection settings picked up by storeDataInMySQL.
    // Map.apply throws NoSuchElementException on a missing key — same failure
    // mode as the original Option.get chain, but idiomatic. The explicit type
    // annotation is required for implicits in Scala 3 and avoids a 2.13 warning.
    implicit val mysqlConfig: MySQLConfig =
      MySQLConfig(config("mysql.url"), config("mysql.user"), config("mysql.password"))

    storeDataInMySQL(bookDF, ratingDF)

    spark.stop()
  }

  /**
    * Overwrites the Book and Rating tables in MySQL with the given DataFrames.
    *
    * @param bookDF      book records to persist into [[MYSQL_BOOKS]]
    * @param ratingDF    rating records to persist into [[MYSQL_RATING]]
    * @param mysqlConfig JDBC url and credentials
    */
  def storeDataInMySQL(bookDF: DataFrame, ratingDF: DataFrame)(implicit mysqlConfig: MySQLConfig): Unit = {
    bookDF.write
      .mode("overwrite")
      .format("jdbc")
      .option("url", mysqlConfig.url)
      .option("dbtable", MYSQL_BOOKS)
      .option("user", mysqlConfig.user)
      .option("password", mysqlConfig.password)
      .save()

    ratingDF.write
      .mode("overwrite")
      .format("jdbc")
      .option("url", mysqlConfig.url)
      .option("dbtable", MYSQL_RATING)
      .option("user", mysqlConfig.user)
      .option("password", mysqlConfig.password)
      .save()
  }

}

