package com.cw.recommend.dataLoad

import com.cw.recommend.common.Runner.{RunnerConfig, runSpark}
import com.cw.recommend.common.constant._
import com.cw.recommend.common.model.{Product, UserRating}
import com.mongodb.casbah.commons.MongoDBObject
import org.apache.spark.sql.DataFrame


/**
 * Batch entry point that loads the raw product and rating flat files into
 * MongoDB, then builds the secondary indexes the recommenders query against.
 *
 * Input formats (established by the parsing below):
 *  - product file: `^`-separated; fields 0,1,4,5,6 are used
 *    (id, name, imageUrl, categories, tags — names assumed from `Product`'s
 *    field order; confirm against the model definition)
 *  - rating file: `,`-separated as (userId, productId, score, timestamp)
 *
 * NOTE(review): a malformed line (missing field / non-numeric value) will
 * fail the whole job with e.g. NumberFormatException — intentional fail-fast
 * behavior is assumed; confirm if bad lines should be skipped instead.
 */
object DataLoader {

  def main(args: Array[String]): Unit = {

    // Explicit type on the implicit: untyped implicit vals are a warning in
    // Scala 2.13 and an error in Scala 3, and obscure what is being injected.
    implicit val runnerConfig: RunnerConfig = RunnerConfig("DataLoader")

    runSpark { spark =>
      import spark.implicits._

      // Product file is caret-delimited; split takes a regex, so `^` must be escaped.
      val productRDD = spark.sparkContext.textFile(PRODUCT_DATA_PATH)
      val productDF = productRDD.map { item =>
        val attr = item.split("\\^")
        // Fields 2 and 3 are deliberately skipped (unused columns in the raw dump).
        Product(attr(0).toInt, attr(1).trim, attr(4).trim, attr(5).trim, attr(6).trim)
      }.toDF

      // Rating file is a plain CSV: userId, productId, score, timestamp.
      val ratingRDD = spark.sparkContext.textFile(RATING_DATA_PATH)
      val ratingDF = ratingRDD.map { item =>
        val attr = item.split(",")
        UserRating(attr(0).toInt, attr(1).toInt, attr(2).toDouble, attr(3).toInt)
      }.toDF

      storeDataInMongoDB(productDF, ratingDF)
    }
  }

  /**
   * Writes both DataFrames to their MongoDB collections and creates the
   * lookup indexes afterwards (indexing after the bulk write avoids paying
   * index-maintenance cost per inserted document).
   *
   * @param product product DataFrame destined for [[PRODUCT_COLLECTION]]
   * @param rating  rating DataFrame destined for [[RATING_COLLECTION]]
   */
  def storeDataInMongoDB(product: DataFrame, rating: DataFrame): Unit = {

    import com.cw.recommend.common.util.MongoDBUtil._
    clientContext { cli =>
      // Resolve the database handle once instead of once per collection.
      val db = cli(MONGODB_DB)
      val productCollection = db.getCollection(PRODUCT_COLLECTION)
      val ratingCollection = db.getCollection(RATING_COLLECTION)

      product.sinkMongoDB(PRODUCT_COLLECTION)
      rating.sinkMongoDB(RATING_COLLECTION)

      // Ascending single-field indexes backing the recommenders' point lookups.
      productCollection.createIndex(MongoDBObject("productId" -> 1))
      ratingCollection.createIndex(MongoDBObject("userId" -> 1))
      ratingCollection.createIndex(MongoDBObject("productId" -> 1))
    }
  }
}