package com.yomob.ml.recommand

import java.io.FileInputStream
import java.time.LocalDate
import java.time.format.DateTimeFormatter
import java.util.Properties

import org.apache.hadoop.fs.{FileSystem, Path}
import org.apache.spark.mllib.evaluation.{RankingMetrics, RegressionMetrics}
import org.apache.spark.mllib.recommendation.{ALS, MatrixFactorizationModel, Rating}
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{Row, SparkSession}
import org.apache.spark.{SparkConf, SparkContext}
import org.jblas.DoubleMatrix
import redis.clients.jedis.{Jedis, JedisPool, JedisPoolConfig}

import scala.collection.mutable.ArrayBuffer
import scala.util.Random

object SimpleRecommendModel {
  // Redis key prefix: user_<numericId> -> raw device id.
  val userNumPrefix = "user_"
  // Redis key prefix: clicked_<deviceId> -> set of ad ids the user clicked.
  val userClickAdPrefix = "clicked_"
  // Redis key prefix: ad_<numericId> -> raw ad id.
  val adNumPrefix = "ad_"
  // Sub-directory (under the configured ratings backup path) for daily rating dumps.
  val ratingsSavePath = "/rates"
  // Redis key prefix: r_<deviceId> -> comma-separated recommendation list.
  val recommendListPrefix = "r_"
  // NOTE(review): appears unused in this file — getRandom uses Math.random() instead.
  val random = new Random()
  /**
    * Cosine similarity of two latent-factor vectors:
    * dot(vec1, vec2) / (|vec1| * |vec2|).
    */
  def cosineSimilarity(vec1: DoubleMatrix, vec2: DoubleMatrix): Double = {
    val dotProduct = vec1.dot(vec2)
    val normProduct = vec1.norm2() * vec2.norm2()
    dotProduct / normProduct
  }

  /**
    * Serializable wrapper around a JedisPool so it can be referenced from
    * Spark driver code.
    *
    * NOTE(review): although `url` may contain a comma-separated list of
    * endpoints, only the first "ip:port" entry is actually used to build
    * the pool — confirm this is intended.
    *
    * @param url comma-separated list of "ip:port" Redis endpoints
    */
  class JedisClient(url: String) extends Serializable {
    // Pool sizing / behaviour configuration.
    val config: JedisPoolConfig = new JedisPoolConfig
    config.setMaxIdle(30)
    config.setMaxTotal(50)
    config.setBlockWhenExhausted(true)
    config.setMaxWaitMillis(10000)
    config.setTestWhileIdle(true)
    // Split "ip1:port1,ip2:port2,..." and keep only the first endpoint.
    var urls = url.split(",")
    var ip = urls(0).split(":")
    var port = ip(1).toInt
    // Socket timeout in milliseconds.
    var socTimeOut = 30000

    var pool = new JedisPool(config, ip(0), port, socTimeOut)

    // Borrow a connection from the pool; callers are responsible for close().
    def getJedis(): Jedis = {
      pool.getResource()
    }
  }

  /**
    * Compute Mean Average Precision (MAP) of an ALS model, ranking every
    * product for every user and comparing against the observed ratings.
    *
    * @param model   trained matrix-factorization model
    * @param sc      SparkContext used to broadcast the item-factor matrix
    * @param ratings observed (user, product, rating) triples used as ground truth
    * @return MAP across all users
    */
  def getMAP(model: MatrixFactorizationModel, sc: SparkContext, ratings: RDD[Rating]): Double = {
    // Collect all item factors to the driver into one matrix (one row per product).
    val itemFactors = model.productFeatures.map { case (id, factor)
    => factor
    }.collect()
    val itemMatrix = new DoubleMatrix(itemFactors)
    // Broadcast the item feature matrix: closures in map-side transforms need
    // a serializable, read-only copy available on every executor.
    val imBroadcast = sc.broadcast(itemMatrix)
    // For each user, score every product (itemMatrix * userVector) and rank
    // products by descending predicted score.
    val allRecs = model.userFeatures.map { case (userId, array) =>
      val userVector = new DoubleMatrix(array)
      val scores = imBroadcast.value.mmul(userVector)
      val sortedWithId = scores.data.zipWithIndex.sortBy(-_._1)
      // +1: matrix rows are 0-based while product ids start at 1.
      // NOTE(review): this assumes row i of itemMatrix corresponds to product
      // id i+1, i.e. productFeatures are contiguous and collected in id order —
      // confirm; otherwise the ranking ids are wrong.
      val recommendedIds = sortedWithId.map(_._2 + 1).toSeq
      (userId, recommendedIds)
    }
    // Ground truth: the products each user actually interacted with.
    val userMovies = ratings.map { case Rating(user, product, rating) =>
      (user, product)
    }.groupBy(_._1)
    val predictedAndTrueForRanking = allRecs.join(userMovies).map { case
      (userId, (predicted, actualWithIds)) =>
      val actual = actualWithIds.map(_._2)
      (predicted.toArray, actual.toArray)
    }
    // Compute MAP over (predicted ranking, actual items) pairs.
    val rankingMetrics = new RankingMetrics(predictedAndTrueForRanking)
    rankingMetrics.meanAveragePrecision
  }

  /**
    * Intended to compute, for each known user, the most similar users by
    * cosine similarity of their ALS latent factors.
    *
    * NOTE(review): the entire implementation is commented out, so this method
    * is currently a no-op stub.
    */
  def getUserSimilarity(): Unit = {
    //val userID = jedis.smembers("user")
    //        for (k <- userID) {
    //          val userId = jedis.get(k).toInt
    //          val userFactor = model.userFeatures.lookup(userId).head
    //          val userVector = new DoubleMatrix(userFactor)
    //          val sims = model.userFeatures.map { case (id, factor) =>
    //            val factorVector = new DoubleMatrix(factor)
    //            val sim = cosineSimilarity(factorVector, userVector)
    //            (id,sim)
    //          }
    //          val sortedSims = sims.top(10)(Ordering.by[(Int,Double),Double]{case (id,similarity) => similarity})
    //          println(sortedSims.slice(1,10).map{case (id,sim) => userSimilarity(k,userInfo.get(id).get,sim)}.mkString("\n"))
    //          //userSimilarities += sortedSims.slice(1,5).map{case (id,sim) => (userId,id,sim)}.mkString("\n")
    //        }
  }

  case class userSimilarity(me: String, other: String, similarity: Double)

  /**
    * Create (or reuse) a SparkSession.
    *
    * @param runMode "local" runs with a local master (app name "alsLocal");
    *                anything else uses the supplied cluster master URL
    * @param master  Spark master URL used when not running locally
    */
  def getSparkSession(runMode: String, master: String): SparkSession = {
    val conf = new SparkConf()
    if (runMode.equals("local")) conf.setMaster("local").setAppName("alsLocal")
    else conf.setMaster(master).setAppName("als")
    SparkSession.builder().config(conf).getOrCreate()
  }

  /** Parse a "yyyy-MM-dd" date string into a LocalDate. */
  def getLocalDate(date: String): LocalDate = {
    val isoDayFormat = DateTimeFormatter.ofPattern("yyyy-MM-dd")
    LocalDate.parse(date, isoDayFormat)
  }

  /**
    * Build the ISO ("yyyy-MM-dd") date strings for the `num` days preceding
    * `now`, newest first: now-1, now-2, ..., now-num. Used to locate cached
    * ratings directories from previous runs.
    *
    * @param now reference date (exclusive; typically the training start date)
    * @param num how many prior days to include (default 6); non-positive
    *            values yield an empty array
    * @return date strings, most recent first
    */
  def getCacheRatingsPath(now: LocalDate, num: Int = 6): Array[String] = {
    // The original loop's guard `!tmpDay.isBefore(sDay)` was always true
    // (after k <= num decrements tmpDay is still >= now-num), so the result
    // is exactly the `num` preceding days — expressed here without the
    // mutable buffer and counter vars.
    (1 to num).map(daysBack => now.minusDays(daysBack).toString).toArray
  }

  /**
    * Check whether a path exists on the Hadoop-configured file system.
    *
    * @param path path to test (e.g. an HDFS directory)
    * @param sc   SparkContext supplying the Hadoop configuration
    */
  def checkPathExists(path: String, sc: SparkContext): Boolean = {
    val fileSystem = FileSystem.get(sc.hadoopConfiguration)
    fileSystem.exists(new Path(path))
  }

  /**
    * Map one raw (deviceId, adId) row to a numeric Rating, creating the id
    * mappings in Redis on first sight.
    *
    * Redis keys written here:
    *   <deviceId> -> numeric user id; user_<n> -> deviceId; set "user" of device ids;
    *   <adId>     -> numeric ad id;   ad_<n>   -> adId;     set "ad" of ad ids;
    *   clicked_<deviceId> -> set of clicked ad ids (only when rate == 5).
    *
    * NOTE(review): assumes column 0 is the device id and column 1 the ad id,
    * both strings — confirm against AdDataServer's output schema. Also assumes
    * device ids and ad ids never collide as Redis keys.
    *
    * @param r     input row (deviceId at index 0, adId at index 1)
    * @param jedis open Redis connection (not closed here)
    * @param rate  rating to assign (5 = click, lower = impression only)
    * @return Rating with the numeric user/product ids looked up from Redis
    */
  def numerical(r: Row, jedis: Jedis, rate: Double): Rating = {
    //user info
    if (!jedis.exists(r.getAs(0))) {
      // First time this device id is seen: allocate the next numeric id.
      val myUserNum = jedis.incr("userNum").toString
      jedis.set(r.getAs(0), myUserNum)
      jedis.set(userNumPrefix + myUserNum, r.getAs(0))
      jedis.sadd("user", r.getAs(0))
    }
    //click ad info
    val ad = r.getString(1)
    if (!jedis.exists(ad)) {
      // First time this ad id is seen: allocate the next numeric id.
      val myAdNum = jedis.incr("adNum").toString
      jedis.set(ad, myAdNum)
      jedis.set(adNumPrefix + myAdNum, ad)
      jedis.sadd("ad", ad)
    }
    //save clicked and impressions record,maybe just save clicked is ok
    if (rate == 5) {
      jedis.sadd(userClickAdPrefix + r.getAs(0), ad)
    }
    //trans to number rating
    Rating(jedis.get(r.getAs(0)).toInt, jedis.get(ad).toInt, rate)
  }
  /**
    * Pseudo-random double drawn uniformly from [min, max), then truncated
    * (floored) to one decimal place.
    */
  def getRandom(min: Double, max: Double): Double = {
    val sample = Math.random() * (max - min) + min
    val scaled = sample * 10
    Math.floor(scaled) / 10
  }
  /**
    * Entry point: end-to-end ALS recommendation pipeline.
    *
    * Reads a properties file (args(0)); loads click/impression data from
    * Hadoop; maps raw device/ad ids to numeric ids via Redis; builds Rating
    * records (click = 5, impression-only = random low rating); optionally
    * merges in previously saved ratings; trains an ALS model; prints
    * MSE/RMSE/MAP metrics; and writes per-user top-8 recommendation lists
    * back to Redis under "r_<deviceId>".
    */
  def main(args: Array[String]): Unit = {
    if (args.length < 1) {
      println("please special the config file 'sconf.properties',for example: ./sconf.properties")
      System.exit(1)
    }
    // --- configuration ---
    val properties = new Properties()
    properties.load(new FileInputStream(args(0)))
    val redisUrl = properties.getProperty("redis.url.list")
    val redisPwd = properties.getProperty("redis.password")
    val readRatingsPath = properties.getProperty("ratings.backup.path")
    val sparkUrl = properties.getProperty("spark.master.url")
    val runModel = properties.getProperty("run.model")
    val hdfsPath = properties.getProperty("hdfs.data.dictionary")
    val sDate = properties.getProperty("data.start.date")
    val eDate = properties.getProperty("data.end.date")
    // NOTE(review): missing properties here would surface as an NPE from
    // toBoolean/toInt — no validation is performed.
    val loadTrainedData = properties.getProperty("is.load.trained.data").toBoolean
    val loadTrainedDataNum = properties.getProperty("load.trained.data.num").toInt

    val startDate = getLocalDate(sDate);
    val endDate = getLocalDate(eDate);
    val sparkSession = getSparkSession(runModel, sparkUrl)
    val sc = sparkSession.sparkContext
    sc.setLogLevel("warn")
    // --- load click / impression data from Hadoop ---
    val dataServer = new AdDataServer(sparkSession, hdfsPath)
    val s = System.currentTimeMillis()
    val alsCounters = List("ad_adclick", "cp_adclick", "ad_adview", "cp_adview")
    val baseData = dataServer.getBaseDataSet(startDate, endDate, alsCounters)
    baseData.persist()
    // Force materialization so the persist takes effect before the two reads below.
    baseData.count()
    val clickData = dataServer.getClickedDataForALS(baseData).distinct()
    val clickJRDD = clickData.coalesce(48).toJavaRDD
    val impressionsData = dataServer.getImpressDataForALS(baseData).distinct()
    val impressionsJRDD = impressionsData.coalesce(48).toJavaRDD
    // Impressions that never led to a click.
    val justImpresData = impressionsJRDD.subtract(clickJRDD)
    baseData.unpersist()

    clickJRDD.persist()
    justImpresData.persist()

    val sumUsers = clickJRDD.count() + justImpresData.count()
    println("Finish read data(" + sDate + " to " + eDate + ") from Hadoop.")
    val e = System.currentTimeMillis()
    println("Cost " + (e - s) / 1000 + " second. ")
    if (sumUsers == 0) {
      println("Hadoop dataSet no data from " + sDate + " to " + eDate + ",als application exit(0).")
      System.exit(0)
    }
    // --- numerical treatment: map raw ids to integers via Redis ---
    // NOTE(review): both RDDs are collect()ed to the driver and processed
    // sequentially over one Jedis connection; this caps scalability at what
    // fits in driver memory.
    val jedisClient = new JedisClient(redisUrl)
    val jedis = jedisClient.getJedis()
    jedis.auth(redisPwd)
    val arr = ArrayBuffer[Rating]()
    println("Start to numerical treatment...")
    val startNumerical = System.currentTimeMillis()
    clickJRDD.rdd.collect().foreach(r => {
      //trans to number rating
      arr += numerical(r, jedis, 5)
    }
    )
    clickJRDD.unpersist()
    justImpresData.rdd.collect().foreach(r => {
      //trans to number rating
      arr += numerical(r, jedis, getRandom(1,2.5))
    }
    )
    // NOTE(review): jedis is not closed if an exception is thrown above —
    // consider a try/finally.
    if (jedis != null) {
      jedis.close()
    }
    println("numerical treatment cost : " + (System.currentTimeMillis() - startNumerical) / 1000 + "second.")
    justImpresData.unpersist()
    //training data set
    var ratings = sc.parallelize(arr.toArray.toList)
    println("new load train dataSet size = " + ratings.count())
    //save new train dataSet
    println("save newest ratings at " + eDate)
    // Persist today's ratings so future runs can merge them back in below.
    ratings.distinct().saveAsTextFile(readRatingsPath + ratingsSavePath + "/" + eDate)

    //load old training data
    val startLoadOldRates = System.currentTimeMillis()
    if (loadTrainedData) {
      println("Start to load cache ratings at :")
      for (p <- (getCacheRatingsPath(startDate, loadTrainedDataNum))) {
        val rPath = readRatingsPath + ratingsSavePath + "/" + p
        if (checkPathExists(rPath, sc)) {
          // Each saved line is Rating.toString, i.e. "Rating(user,product,rating)";
          // substring(7, len-1) strips the "Rating(" prefix and ")" suffix.
          val oldRates = sc.textFile(rPath, 8).map(line =>
            line.substring(7, line.length - 1).split(","))
            .map(r => Rating(r(0).toInt, r(1).toInt, r(2).toDouble))
          println(p + " old train dataSet size = " + oldRates.count())
          ratings = ratings.++(oldRates)
        } else {
          println("Warn : cache ratings path " + p + " doesn't exists.")
        }
      }
    }
    ratings = ratings.distinct()
    println("Load old train dataSet cost : " + (System.currentTimeMillis() - startLoadOldRates) / 1000 + "second.")
    //10~200
    // ALS hyper-parameters, overridable via properties; defaults: rank 15,
    // 21 iterations, lambda 0.01.
    val r = properties.getProperty("als.algorithm.rank")
    var rank = 15
    if (r != null) {
      rank = Integer.valueOf(r.trim)
    }
    val numIters = properties.getProperty("als.algorithm.numIterations")
    var numIterations = 21
    if (numIters != null) {
      numIterations = Integer.valueOf(numIters.trim)
    }
    println("Cache training data...")
    ratings = ratings.coalesce(48)
    ratings.persist()
    println("train dataSet size = " + ratings.count())
    println("Start to run als train...")
    val lamdaConfs = properties.getProperty("als.algorithm.lamda")
    var lamda = 0.01
    if(lamdaConfs != null){
      lamda = lamdaConfs.toDouble
    }
    println("lamda is "+lamda)
    val startALS = System.currentTimeMillis()
    //create model
    val model = ALS.train(ratings, rank, numIterations, lamda)
    //predict result
    // Predict on the training pairs themselves (training error, not held-out).
    val usersProducts = ratings.map {
      case Rating(user, product, rate) =>
        (user, product)
    }
    val predictions = model.predict(usersProducts).map {
      case Rating(user, product, rate) =>
        ((user, product), rate)
    }

    val ratesAndPreds = ratings.map {
      case Rating(user, product, rate) =>
        ((user, product), rate)
    } join (predictions)

    val MSE = ratesAndPreds.map {
      case ((user, product), (r1, r2)) =>
        val err = (r1 - r2)
        err * err
    }.mean()
    println("Finish train,cost " + (System.currentTimeMillis() - startALS) / 1000 + "seconds.")
    println("today new dataSet count : " + sumUsers)
    println("user count : " + model.userFeatures.count())
    println("ad count : " + model.productFeatures.count())

    //Root Mean Squared Error
    val predictedAndTrue = ratesAndPreds.map { case ((user, product), (actual, predicted)) => (actual, predicted) }
    val regressionMetrics = new RegressionMetrics(predictedAndTrue)
    println("Mean Squared Error = " + regressionMetrics.meanSquaredError)
    println("Mean Squared Error = " + MSE)
    println("Root Mean Squared Error = " + regressionMetrics.rootMeanSquaredError)

    println("Mean Average Precision = " + getMAP(model, sc, ratings))

    // get user recommend
    // NOTE(review): `users` is computed but never used.
    val users = ratings.map(_.user).distinct()
    println("Start to recommend ad for all Users.")
    val start = System.currentTimeMillis()
    // Top-8 recommendations per user, computed for every user at once.
    val recommendations = ExtMatrixFactorizationModelHelper.recommendProductsForUsers(model, 8)
    val endPartition = System.currentTimeMillis()
    println("Rdd pre-partition cost " + (endPartition - start) / 1000 + "seconds.")
    val rs = recommendations.collect()
    println("Collect recommendations cost "+(System.currentTimeMillis() - endPartition)/1000+"seconds.")
    // --- write recommendation lists back to Redis ---
    val jedisNew = jedisClient.getJedis()
    jedisNew.auth(redisPwd)
    // NOTE(review): the lambda parameter shadows the outer `rs` array.
    rs.foreach(rs => {
      // Translate numeric ids back to the raw device/ad ids stored in Redis,
      // then store "adId_rate,adId_rate,..." under r_<deviceId>.
      var deviceId = jedisNew.get(userNumPrefix + rs._1)
      jedisNew.set(recommendListPrefix + deviceId, rs._2.map { case Rating(userID, adID, rate) => {
        var adRealId = jedisNew.get(adNumPrefix + adID)
        adRealId + "_" + rate.formatted("%.2f").toDouble
      }
      }.mkString(","))
    })
    println("Finished recommend ad for all Users,cost " + (System.currentTimeMillis() - start) / 1000 + "s.")
    //println("Start to save model...")
    //save model and data set
    //model.save(sc, modelSavePath)
    ratings.unpersist()
    sc.stop()
    jedisNew.close()
    println("ALS model all run finish...")
  }
}
