package com.fanli.bigdata.rec

import java.text.SimpleDateFormat

import org.apache.log4j.{Level, Logger}
import org.apache.solr.client.solrj.impl.HttpSolrServer
import org.apache.solr.common.SolrInputDocument
import org.apache.spark.mllib.recommendation.{MatrixFactorizationModel, Rating}
import org.apache.spark.storage.StorageLevel
import org.apache.spark.{SparkContext, SparkConf}
import org.json4s._
import org.json4s.jackson.Serialization
import scopt.OptionParser

import scala.collection.JavaConversions
import scala.collection.mutable.ListBuffer

object SuperMainPagePredict {

  // Command-line parameters for the prediction job; populated by the scopt parser in main().
  case class Params(
           modelPath: String = null,            // path of the trained MatrixFactorizationModel
           userPath: String = null,             // path to the user id (uid) list, one id per line
           itemPath: String = null,             // path to the item id (pid) list, one id per line
           userStep: Int = 10000,               // number of users predicted per batch
           solrUrl: String = null,              // comma-separated Solr base URLs to index into
           solrCommitInstance: Int = 7,         // partition count used when aggregating/committing to Solr
           parallelism: Int = 7,                // partition count for the item RDD and per-batch uid RDD
           recTopNum: Int = 500,                // number of top-scored items kept per user
           modelPartitionCount: Int = 7) extends AbstractParams[Params]

  /**
   * Entry point: parse command-line options into [[Params]] and run the
   * prediction job. Exits with status 1 when parsing fails.
   */
  def main(args: Array[String]) {

    // Quiet Spark's internal logging; the job's own println output stays visible.
    Logger.getLogger("org.apache.spark").setLevel(Level.WARN)

    val defaultParams = Params()

    val parser = new OptionParser[Params]("SuperMainPagePredict") {
      head("SuperMainPagePredict: predict recommendations from super user action data.")
      opt[String]("modelPath")
        .required()
        .text("modelPath by training")
        .action((x, c) => c.copy(modelPath = x))
      opt[String]("userPath")
        .required()
        .text("path to a user(uid)")
        .action((x, c) => c.copy(userPath = x))
      opt[String]("itemPath")
        .required()
        .text("path to a item(pid)")
        .action((x, c) => c.copy(itemPath = x))
      opt[Int]("userStep")
        .text(s"userStep, default: ${defaultParams.userStep}")
        .action((x, c) => c.copy(userStep = x))
      opt[String]("solr")
        .required()
        .text("path to solr index")
        .action((x, c) => c.copy(solrUrl = x))
      opt[Int]("solrCommitInstance")
        .text(s"solrCommitInstance, default: ${defaultParams.solrCommitInstance}")
        .action((x, c) => c.copy(solrCommitInstance = x))
      opt[Int]("parallelism")
        .text(s"parallelism, default: ${defaultParams.parallelism}")
        .action((x, c) => c.copy(parallelism = x))
      opt[Int]("recTopNum")
        .text(s"recTopNum, default: ${defaultParams.recTopNum}")
        .action((x, c) => c.copy(recTopNum = x))
      opt[Int]("modelPartitionCount")
        .text(s"modelPartitionCount, default: ${defaultParams.modelPartitionCount}")
        .action((x, c) => c.copy(modelPartitionCount = x))
      note(
        """
          |Example command line to run this app:
          |
          | bin/spark-submit --class com.fanli.bigdata.rec.SuperMainPagePredict \
          |  super-main-page-predict.jar \
          |  --modelPath /path/to/als/model --userPath /path/to/uids \
          |  --itemPath /path/to/pids --solr http://host:8983/solr/core1,http://host2:8983/solr/core1
        """.stripMargin)
    }

    parser.parse(args, defaultParams) match {
      case Some(params) => run(params)
      case None         => sys.exit(1)
    }
  }

  /**
   * Run the batch-prediction job: load the trained ALS model, score every
   * (user, item) pair in batches of `params.userStep` users, and push each
   * user's top-N recommendations into Solr via [[solrIndex]].
   */
  def run(params: Params): Unit = {
    val conf = new SparkConf()
    val sc = new SparkContext(conf)

    // Load the trained model and shrink its feature RDDs to a small, cached
    // partition count so the repeated predict() calls below stay cheap.
    val loadModel = MatrixFactorizationModel.load(sc, params.modelPath)
    println(s"load model user features partitions count: ${loadModel.userFeatures.partitions.length}")
    println(s"load model product features partitions count: ${loadModel.productFeatures.partitions.length}")

    val userFeatures = loadModel.userFeatures.coalesce(params.modelPartitionCount).persist(StorageLevel.MEMORY_AND_DISK)
    val productFeatures = loadModel.productFeatures.coalesce(params.modelPartitionCount).persist(StorageLevel.MEMORY_AND_DISK)

    val model = new MatrixFactorizationModel(loadModel.rank, userFeatures, productFeatures)

    println("load training model done.")

    // Users and items are keyed by the hashCode of their string ids; the
    // (hashCode -> original string) pairs are kept so results can be mapped back.
    // NOTE(review): hashCode collisions between distinct ids would silently drop
    // entries when .toMap is applied below — confirm id cardinality makes this safe.
    val userList = sc.textFile(params.userPath)
                     .map(_.trim)
                     .filter(_.nonEmpty)
                     .map(data => (data.hashCode, data))
                     .cache()

    val itemList = sc.textFile(params.itemPath)
                     .map(_.trim)
                     .filter(_.nonEmpty)
                     .map(data => (data.hashCode, data))
                     .repartition(params.parallelism)
                     .persist(StorageLevel.MEMORY_AND_DISK)

    println(s"model rank: ${model.rank}")
    println(s"model user features partitions count: ${model.userFeatures.partitions.length}")
    println(s"model product features partitions count: ${model.productFeatures.partitions.length}")
    println(s"model user features storage level: ${model.userFeatures.getStorageLevel}")
    println(s"model product features storage level: ${model.productFeatures.getStorageLevel}")
    println(s"item partitions count : ${itemList.partitions.length}")
    println(s"solr url ${params.solrUrl}")

    // hashcode ---> string id  map
    val userIDMap = userList.collect().toMap  // hashcode --> src uid(string)
    val itemIDMap = itemList.collect().toMap  // hashcode --> src item(string)

    // Broadcast the lookup tables and settings so executors can build Solr docs locally.
    val brdUserIDMap = sc.broadcast[Map[Int, String]](userIDMap)
    val brdItemIDMap = sc.broadcast[Map[Int, String]](itemIDMap)
    val solrList     = params.solrUrl.split(",").toList
    val brdSolrList  = sc.broadcast[List[String]](solrList)
    val brdRecTopNum = sc.broadcast[Int](params.recTopNum)

    println(s"users count:${userIDMap.size}")
    println(s"items count:${itemIDMap.size}")

    userList.unpersist()

    // aggregateByKey plumbing: collect each user's (item, score) pairs into one
    // buffer. Spark serializes the zero value, so each key gets a fresh copy.
    val initList = ListBuffer.empty[(Int, Double)]
    val seqOp = (c: ListBuffer[(Int, Double)], v: (Int, Double)) => c += v
    val combOp = (c1: ListBuffer[(Int, Double)], c2: ListBuffer[(Int, Double)]) => c1 ++= c2

    // Loop-invariant: one formatter for every batch's timing log line.
    val format = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS")

    // Predict in batches of userStep users to bound the cartesian product size.
    userIDMap.keys.toList.grouped(params.userStep).zipWithIndex.foreach { case (uids, step) =>
      val uidRdd = sc.parallelize(uids, params.parallelism)
      val predictedData = uidRdd.cartesian(itemList.map(_._1)).persist(StorageLevel.MEMORY_AND_DISK)

      val startTime = System.currentTimeMillis()

      model.predict(predictedData)
        .filter(!_.rating.isNaN)   // drop pairs the model could not score
        .map(r => (r.user, (r.product, r.rating)))
        // [(Int (uid), ListBuffer[(Int, Double)])]
        .aggregateByKey(initList, params.solrCommitInstance)(seqOp, combOp)
        .foreachPartition { partition =>
          solrIndex(partition.toSeq, brdUserIDMap.value, brdItemIDMap.value, brdSolrList.value, brdRecTopNum.value)
        }

      val endTime = System.currentTimeMillis()
      println("predict time:" + (endTime - startTime) / 1000 + "s.")

      predictedData.unpersist()

      println("current time:" + format.format(System.currentTimeMillis()))
      println(s"complete step : ${step + 1}")
    }

    itemList.unpersist()
    sc.stop()
  }

  /**
   * Return a copy of `model` whose user and product feature RDDs have been
   * repartitioned to `partitionCount` partitions; rank is carried over unchanged.
   */
  def getRepartitionModel(model : MatrixFactorizationModel, partitionCount : Int) : MatrixFactorizationModel =
    new MatrixFactorizationModel(
      model.rank,
      model.userFeatures.repartition(partitionCount),
      model.productFeatures.repartition(partitionCount))

  /**
   * Build an HttpSolrServer client for `url`, tuned for long-running bulk
   * indexing: generous read timeout, pooled connections, single retry.
   */
  def getSolrServer(url : String): HttpSolrServer = {
    val solr = new HttpSolrServer(url)
    solr.setSoTimeout(600000)                // read timeout: 10 minutes
    solr.setConnectionTimeout(60000)         // connect timeout: 1 minute
    solr.setDefaultMaxConnectionsPerHost(100)
    solr.setMaxTotalConnections(100)
    solr.setFollowRedirects(false)
    solr.setAllowCompression(true)
    solr.setMaxRetries(1)
    solr
  }

  /**
   * Build one Solr document per user (user_id plus the top-`topNum` scored
   * items serialized as JSON) and commit the batch to every Solr URL.
   *
   * @param elements  (uid hashCode, buffer of (item hashCode, score)) per user
   * @param userIDMap uid hashCode -> original uid string
   * @param itemIDMap item hashCode -> original item string
   * @param solrUrl   Solr base URLs to index into
   * @param topNum    number of highest-scored items kept per user
   */
  def solrIndex(elements : Seq[(Int, ListBuffer[(Int, Double)])],
                userIDMap : Map[Int, String],
                itemIDMap : Map[Int, String],
                solrUrl : List[String],
                topNum : Int): Unit = {
    // JavaConverters replaces the deprecated implicit JavaConversions.
    import scala.collection.JavaConverters._

    val format = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS")
    println("current time:" + format.format(System.currentTimeMillis()))

    implicit val formats = DefaultFormats
    val docs = elements.map { case (uidHash, scored) =>
      val document = new SolrInputDocument()
      // Map item hash codes back to original ids, sort by score desc, keep top-N.
      val rec_items = scored.map { case (itemHash, score) => (itemIDMap(itemHash), score) }
                            .sortWith(_._2 > _._2)
                            .take(topNum)
      document.addField("user_id", userIDMap(uidHash))
      document.addField("rec_items", Serialization.write(rec_items))
      document
    }

    if (docs.nonEmpty) {
      val startTime = System.currentTimeMillis()
      // Create each client lazily and shut it down in finally, so a failing
      // add() can no longer leak this or any later server's HTTP resources.
      solrUrl.foreach { url =>
        val server = getSolrServer(url)
        println(s"commit index for : ${server.getBaseURL}")
        try {
          server.add(docs.asJavaCollection)
        } finally {
          // shutdown
          server.shutdown()
          println(s"shutdown solr server.${server.getBaseURL}")
        }
      }

      val endTime = System.currentTimeMillis()
      println(s"commit to index count: ${docs.size}")
      println("commit time:" + (endTime - startTime) / 1000 + "s.")
    } else {
      println(s"solr index docs is empty, size = ${docs.size} ")
    }
  }

}
