package com.test.statistics.recommender

import com.mongodb.casbah.commons.MongoDBObject
import com.mongodb.casbah.{MongoClient, MongoClientURI}
import org.apache.spark.SparkConf
import org.apache.spark.sql.{DataFrame, SparkSession}

/*
 * Sample product record from products.csv (fields separated by '^'):
 * 13316                                                    product ID
 * Kingston 金士顿 DataTraveler 101 G2 32GB 优盘              product name
 * 1057	268	378                                             product category IDs (unused)
 * B003TJ99M4                                               Amazon ID (unused)
 * https://images-cn-4.ssl-images-amazon.com/images/I/41nIbzEKQCL._SY300_QL70_.jpg   product image URL
 * 存储设备|U盘|电脑/办公                                      product categories
 * 优盘|金士顿|好用|容量挺大的|速度快                            product UGC tags
 */

// Case class modeling one product record loaded from products.csv.
case class Product( productId: Int, name: String, imageUrl: String ,categories: String, tags: String)
/*
 * Sample rating record from ratings.csv (comma-separated):
 * 42103        user ID
 * 457976       product ID
 * 4.0          rating score
 * 1215878400   timestamp (epoch seconds)
 */
case class Rating(userId: Int, productId: Int,score: Double, timestamp:Int)

// MongoDB connection configuration: connection URI and target database name.
case class  MongoConfig(uri: String, db :String)

/**
 * Batch loader: reads the product and rating CSV files with Spark,
 * parses them into [[Product]] / [[Rating]] DataFrames and writes both
 * into MongoDB, replacing any previous contents and (re)creating indexes.
 */
object DataLoader {

  // Local CSV source paths (Windows absolute paths; adjust per environment).
  val PRODUCT_DATA_PATH = "E:\\projects\\emco-recommend-system\\recommender\\DataLoader\\src\\main\\resources\\products.csv"
  val RATING_DATA_PATH = "E:\\projects\\emco-recommend-system\\recommender\\DataLoader\\src\\main\\resources\\ratings.csv"

  // Names of the MongoDB collections the data is written into.
  val MONGO_PRODUCT_COLLECTION = "Product"
  val MONGO_RATING_COLLECTION = "Rating"

  def main(args: Array[String]): Unit = {

    val config = Map(
      "spark.cores" -> "local[*]",
      "mongo.uri" -> "mongodb://localhost:27017/recommender",
      "mongo.db" -> "recommender"
    )

    // Build the Spark session ("spark.cores" actually holds the master URL).
    val sparkConf = new SparkConf().setMaster(config("spark.cores")).setAppName("DataLoader")
    val spark = SparkSession.builder().config(sparkConf).getOrCreate()

    import spark.implicits._

    try {
      // products.csv is '^'-separated; columns 2 (category ids) and 3
      // (Amazon id) are intentionally skipped — see file-header comment.
      val productRDD = spark.sparkContext.textFile(PRODUCT_DATA_PATH)
      val productDF = productRDD.map { item =>
        val attr = item.split("\\^")
        Product(attr(0).toInt, attr(1).trim, attr(4).trim, attr(5).trim, attr(6).trim)
      }.toDF()

      // ratings.csv is comma-separated: userId,productId,score,timestamp.
      val ratingRDD = spark.sparkContext.textFile(RATING_DATA_PATH)
      val ratingDF = ratingRDD.map { item =>
        val attr = item.split(",")
        Rating(attr(0).toInt, attr(1).toInt, attr(2).toDouble, attr(3).toInt)
      }.toDF()

      // Implicit connection config so storeDataInMongoDB picks it up automatically.
      implicit val mongoConfig: MongoConfig = MongoConfig(config("mongo.uri"), config("mongo.db"))

      storeDataInMongoDB(productDF, ratingDF)
    } finally {
      // FIX: spark.stop() was commented out, leaking the SparkSession.
      spark.stop()
    }
  }

  /**
   * Writes both DataFrames into MongoDB.
   *
   * Drops the existing Product/Rating collections, saves the DataFrames via
   * the MongoDB Spark connector (mode "overwrite"), then creates lookup
   * indexes on productId (both collections) and userId (ratings).
   *
   * @param productDF   products to persist
   * @param ratingDF    ratings to persist
   * @param mongoConfig implicit connection URI + database name
   */
  def storeDataInMongoDB(productDF: DataFrame, ratingDF: DataFrame)(implicit mongoConfig: MongoConfig): Unit = {
    // Open an administrative client for drop/index operations.
    val mongoClient = MongoClient(MongoClientURI(mongoConfig.uri))
    try {
      // Handles for db.Product and db.Rating.
      val productCollection = mongoClient(mongoConfig.db)(MONGO_PRODUCT_COLLECTION)
      val ratingCollection = mongoClient(mongoConfig.db)(MONGO_RATING_COLLECTION)

      // Drop any previous contents so the load is idempotent.
      productCollection.dropCollection()
      ratingCollection.dropCollection()

      // Bulk-write both DataFrames through the MongoDB Spark connector.
      productDF.write
        .option("uri", mongoConfig.uri)
        .option("collection", MONGO_PRODUCT_COLLECTION)
        .mode("overwrite")
        .format("com.mongodb.spark.sql")
        .save()

      ratingDF.write
        .option("uri", mongoConfig.uri)
        .option("collection", MONGO_RATING_COLLECTION)
        .mode("overwrite")
        .format("com.mongodb.spark.sql")
        .save()

      // Indexes for the lookups the recommender performs.
      productCollection.createIndex(MongoDBObject("productId" -> 1))
      ratingCollection.createIndex(MongoDBObject("productId" -> 1))
      ratingCollection.createIndex(MongoDBObject("userId" -> 1))
    } finally {
      // FIX: previously the client leaked if any write/index call threw.
      mongoClient.close()
    }
  }
}
