package load

import com.mongodb.casbah.commons.MongoDBObject
import com.mongodb.casbah.{MongoClient, MongoClientURI}
import config.MongoConfig
import org.apache.spark.sql.{DataFrame, SaveMode, SparkSession}

/**
 * Core entry point for loading product and rating data into MongoDB.
 *
 * Reads two caret/comma-delimited CSV files, parses them into `Product` and
 * `Rating` DataFrames, overwrites the corresponding MongoDB collections, and
 * creates lookup indexes.
 */
object DataLoader {
  // Collection name constants.
  // NOTE(review): "RATTING" is a long-standing typo for "RATING"; the name is
  // kept unchanged because other files may reference this public member.
  val RATTING_COLLECTION = "Rating"
  val PRODUCT_COLLECTION = "Product"

  /**
   * Loads product and rating CSV files and writes them to MongoDB.
   *
   * @param args optional overrides: args(0) = products file path,
   *             args(1) = ratings file path. Defaults to the original
   *             local paths when not supplied (backward compatible).
   */
  def main(args: Array[String]): Unit = {
    // Implicit MongoDB connection configuration (URI + database name).
    implicit val config: MongoConfig = MongoConfig()

    // Build the Spark session; switch `master` when deploying to a cluster.
    val spark = SparkSession.builder()
      .appName("dataloader")
      .master("local[*]")
      .getOrCreate()

    // Spark SQL implicit conversions (enables .toDF() on RDDs of case classes).
    import spark.implicits._

    // Input paths: overridable via command-line args, original defaults kept.
    val productPath =
      if (args.length > 0) args(0)
      else "C:\\Users\\花于陌上开\\Documents\\WeChat Files\\wxid_n43uqd9hcsi422\\FileStorage\\File\\2023-06\\products.csv"
    val ratingPath =
      if (args.length > 1) args(1)
      else "C:\\Users\\花于陌上开\\Documents\\WeChat Files\\wxid_n43uqd9hcsi422\\FileStorage\\File\\2023-06\\ratings.csv"

    // Build the Product DataFrame: each line is caret-separated; columns
    // 0, 1, 4, 5, 6 map to the Product case class fields.
    val productDF = spark.sparkContext.textFile(productPath)
      .filter(_.length > 0) // skip empty lines
      .map { line =>
        val fields = line.split("\\^")
        Product(fields(0).trim.toInt, fields(1).trim, fields(4).trim, fields(5).trim, fields(6).trim)
      }
      .toDF()
    productDF.show()

    // Build the Rating DataFrame: comma-separated userID, productID, score, timestamp.
    val ratingDF = spark.sparkContext.textFile(ratingPath)
      .filter(_.length > 0) // skip empty lines
      .map { line =>
        val fields = line.split(",")
        Rating(fields(0).trim.toInt, fields(1).trim.toInt, fields(2).trim.toDouble, fields(3).trim.toLong)
      }
      .toDF()
    ratingDF.show()

    // Persist both DataFrames into MongoDB.
    writeDataToMongoDB(productDF, ratingDF)

    // Shut down Spark.
    spark.stop()
  }

  /**
   * Writes the product and rating DataFrames into their MongoDB collections,
   * replacing any existing data, and creates lookup indexes.
   *
   * @param productDF products to store in the Product collection
   * @param ratingDF  ratings to store in the Rating collection
   * @param config    implicit MongoDB connection settings (URI and database)
   */
  def writeDataToMongoDB(productDF: DataFrame, ratingDF: DataFrame)(implicit config: MongoConfig): Unit = {
    // Open a Casbah client for administrative operations (drop/index).
    val client = MongoClient(MongoClientURI(config.url))
    try {
      // BUGFIX: productCollection previously pointed at RATTING_COLLECTION,
      // so the Product collection was never dropped and its index was
      // created on the wrong collection.
      val productCollection = client(config.db)(PRODUCT_COLLECTION)
      val ratingCollection = client(config.db)(RATTING_COLLECTION)

      // Drop existing collections so the load is a clean overwrite.
      productCollection.dropCollection()
      ratingCollection.dropCollection()

      // Write productDF via the MongoDB Spark connector.
      productDF
        .write
        .option("uri", config.url)
        .option("collection", PRODUCT_COLLECTION)
        .mode(SaveMode.Overwrite)
        .format("com.mongodb.spark.sql")
        .save()

      // Write ratingDF via the MongoDB Spark connector.
      ratingDF
        .write
        .option("uri", config.url)
        .option("collection", RATTING_COLLECTION)
        .mode(SaveMode.Overwrite)
        .format("com.mongodb.spark.sql")
        .save()

      // Create indexes for the query patterns used downstream.
      productCollection.createIndex(MongoDBObject("productID" -> 1))
      ratingCollection.createIndex(MongoDBObject("productID" -> 1))
      ratingCollection.createIndex(MongoDBObject("userID" -> 1))
    } finally {
      // BUGFIX: the client was previously never closed (connection leak).
      client.close()
    }
  }
}
