package spark.udrank

import java.util.Date

import org.apache.spark.mllib.linalg.{Matrix, DenseMatrix}
import org.apache.spark.mllib.linalg.distributed.{BlockMatrix, CoordinateMatrix, MatrixEntry}
import org.apache.spark.rdd.RDD
import org.apache.spark.storage.StorageLevel
import org.apache.spark.{HashPartitioner, SparkContext, SparkConf}
import scala.io.Source

/**
  * 使用自定义rank进行推荐
  */
object UDRank {

  /**
    * Recommendation ranking over String-keyed (row, col, value) triples using the
    * project's StringCoordinateMatrix wrapper.
    *
    * Builds the book-to-book transition matrix
    *   mbb = (1-a) * (mbl * mlp * mpl * mlb) + a * (mbl * mlb)
    * and runs a damped power iteration (PageRank-style) starting from `initRDD`.
    *
    * @param sc      active SparkContext (unused directly; kept for signature parity
    *                with useBlockMatrixCompute)
    * @param initRDD initial user-book preference triples (row, col, weight)
    * @param mblRDD  book -> label transition triples
    * @param mlpRDD  label -> user transition triples
    * @param mplRDD  user -> label transition triples
    * @param mlbRDD  label -> book transition triples
    * @return RDD of (row, col, score) triples after `iterations` rank steps
    */
  def useStringCoordinateMatrixCompute(sc:SparkContext,
                                       initRDD:RDD[(String,String,Double)],
                                       mblRDD:RDD[(String,String,Double)],
                                       mlpRDD:RDD[(String,String,Double)],
                                       mplRDD:RDD[(String,String,Double)],
                                       mlbRDD:RDD[(String,String,Double)]) = {
    // One damped power iteration per loop pass: sys' = (1-d) * (sys x P) + d * init.
    def rank(initMatrix:StringCoordinateMatrix,probabilityMatrix:StringCoordinateMatrix,
             d:Double = 0.15,iterations:Int = 30,partitionNum:Int = 2) = {
      var sysRDD = initMatrix.value

      // Transition probabilities keyed by row id; reused in every iteration, so persist.
      val probabilityRDD = probabilityMatrix.value.map(x=>(x._1,(x._2,x._3)))
        .partitionBy(new HashPartitioner(partitionNum)).persist(StorageLevel.MEMORY_AND_DISK_SER)

      // Damped restart term d * init, keyed by (row, col).
      // Renamed from `initRDD`: the original shadowed the enclosing method
      // parameter of the same name, which invites accidental misuse.
      val dampedInitRDD = initMatrix.value.map(x=>((x._1,x._2),x._3 * d))
        .partitionBy(new HashPartitioner(partitionNum)).persist(StorageLevel.MEMORY_AND_DISK_SER)

      for(i <- 0 until iterations){
        // Join current scores against the transition matrix on the shared key,
        // multiply cell values, and re-aggregate per (row, col).
        sysRDD = sysRDD.map(x=>(x._2,(x._1,x._3))).join(probabilityRDD).map{
          x=>
            val rowName = x._2._1._1
            val colName = x._2._2._1
            val cellValue = x._2._1._2*x._2._2._2
            ((rowName,colName),cellValue)
        }.reduceByKey(new HashPartitioner(partitionNum),_ + _).mapValues(_ * (1-d))
          // fullOuterJoin keeps cells that appear on only one side (the missing
          // side contributes 0.0), so restart mass is never dropped.
          .fullOuterJoin(dampedInitRDD).map(x=>(x._1._1,x._1._2,x._2._1.getOrElse(0.0)+x._2._2.getOrElse(0.0)))
      }
      sysRDD
    }

    val mbl = new StringCoordinateMatrix(mblRDD)
    val mlp = new StringCoordinateMatrix(mlpRDD)
    val mpl = new StringCoordinateMatrix(mplRDD)
    val mlb = new StringCoordinateMatrix(mlbRDD)

    val initMatrix = new StringCoordinateMatrix(initRDD)

    // Mixing weight between the label-only path (mbl*mlb) and the
    // label-user-label path (mbl*mlp*mpl*mlb).
    val a = 0.85

    val mbb = (mbl * mlp * mpl * mlb * (1-a)) + (mbl * mlb * a)
    rank(initMatrix,mbb)
  }

  /**
    * Same ranking computation as useStringCoordinateMatrixCompute, but expressed
    * with MLlib's BlockMatrix. Requires all String ids to be numeric (they are
    * parsed with `toLong`); non-numeric ids throw NumberFormatException.
    *
    * @param sc      active SparkContext, used to build diagonal scalar matrices
    * @param initRDD initial preference triples (row, col, weight)
    * @param mblRDD  book -> label triples
    * @param mlpRDD  label -> user triples
    * @param mplRDD  user -> label triples
    * @param mlbRDD  label -> book triples
    * @return the converged score matrix materialized on the driver
    */
  def useBlockMatrixCompute(sc:SparkContext,
                     initRDD:RDD[(String,String,Double)],
                     mblRDD:RDD[(String,String,Double)],
                     mlpRDD:RDD[(String,String,Double)],
                     mplRDD:RDD[(String,String,Double)],
                     mlbRDD:RDD[(String,String,Double)]) = {

    // Converts (row, col, value) triples with numeric-string ids into a
    // BlockMatrix of 1x1 blocks. Extracted: the original repeated this
    // expression verbatim four times.
    def toBlockMatrix(rdd:RDD[(String,String,Double)]) =
      new CoordinateMatrix(rdd.map(x => new MatrixEntry(x._1.toLong,x._2.toLong,x._3))).toBlockMatrix(1,1)

    val mbl = toBlockMatrix(mblRDD)
    val mlp = toBlockMatrix(mlpRDD)
    val mpl = toBlockMatrix(mplRDD)
    val mlb = toBlockMatrix(mlbRDD)

    // NOTE(review): the (2 rows x 5 cols) dimensions are hard-coded here —
    // confirm they match the actual init data, otherwise downstream
    // multiplications will fail at runtime.
    val initMatrix = new CoordinateMatrix(initRDD.map(x => new MatrixEntry(x._1.toLong,x._2.toLong,x._3)),2,5).toBlockMatrix(1,1)

    val rowNums= mbl.numRows().toInt

    // Builds a rowNums x rowNums diagonal matrix with `a` on the diagonal,
    // used to scale a BlockMatrix by the scalar `a` (BlockMatrix has no
    // scalar-multiply API).
    def num2BlockMatrix(a:Double) = {
      val rdd = sc.parallelize(new Array[Int](rowNums)).zipWithIndex().map(x=> new MatrixEntry(x._2,x._2,a))
      new CoordinateMatrix(rdd).toBlockMatrix(1,1)
    }

    // Mixing weight between the two book-to-book paths; mirrors the String variant.
    val a = 0.85
    val mbb = (mbl multiply mlp multiply mpl multiply mlb multiply num2BlockMatrix(1-a)) add (mbl multiply mlb multiply num2BlockMatrix(a))


    // Damped power iteration: sys' = (1-d) * sys * P + d * init.
    def rank(initMatrix:BlockMatrix,probabilityMatrix:BlockMatrix,
             d:Double = 0.15,iterations:Int = 30,partitionNum:Int = 2) = {
      var sysMatrix = initMatrix
      // Both operands recur every iteration — keep them cached.
      val ini = initMatrix.persist(StorageLevel.MEMORY_AND_DISK_SER)
      val probability = probabilityMatrix.persist(StorageLevel.MEMORY_AND_DISK_SER)
      for(i <- 0 until iterations){
        sysMatrix = (sysMatrix multiply probability multiply num2BlockMatrix(1-d)) add (ini multiply num2BlockMatrix(d))
      }
      // Materializes the full result on the driver; only safe for small matrices.
      sysMatrix.toLocalMatrix()
    }

    rank(initMatrix,mbb)
  }

  /**
    * Entry point: loads the three input files, row-normalizes the transition
    * triples, runs the String-matrix ranking, prints the results and the
    * elapsed wall-clock time in milliseconds.
    */
  def main(args: Array[String]) {
    val conf = new SparkConf().setAppName("spark-partition-test").setMaster("local")
    val sc = new SparkContext(conf)

    // Reads all lines of a file, closing the handle afterwards
    // (the original leaked three Source handles).
    def readLines(path:String) = {
      val src = Source.fromFile(path)
      try src.getLines().toArray finally src.close()
    }

    // Parses "id v1,v2,..." lines, fans each id out over its comma-separated
    // values, and counts (id, value) co-occurrences as weights. Extracted:
    // the original repeated this pipeline verbatim three times.
    def toWeightedTriples(lines:Array[String]) =
      sc.parallelize(lines.map{
        line =>
          val strs = line.split(" ")
          (strs(0),strs(1).split(","))
      }).flatMap(x=>x._2.map(y=>((x._1,y),1.0))).reduceByKey(_+_).map(x=>(x._1._1,x._1._2,x._2))

    val userLabelRDD = toWeightedTriples(readLines("data/spark/udrank/user2label.txt"))
    val bookLabelRDD = toWeightedTriples(readLines("data/spark/udrank/book2label.txt"))
    val userBooksRDD = toWeightedTriples(readLines("data/spark/udrank/userBooks.txt"))

    // Row-normalize each matrix so every row's weights sum to 1.0
    // (transition probabilities). mlp/mlb are the transposed groupings.
    val mblRDD = bookLabelRDD.map(x=>(x._1,(x._2,x._3))).groupByKey().
      flatMap(x=>x._2.map(y=>(x._1,y._1,y._2/x._2.map(_._2).sum)))
    val mlpRDD = userLabelRDD.map(x=>(x._2,(x._1,x._3))).groupByKey().
      flatMap(x=>x._2.map(y=>(x._1,y._1,y._2/x._2.map(_._2).sum)))
    val mplRDD = userLabelRDD.map(x=>(x._1,(x._2,x._3))).groupByKey().
      flatMap(x=>x._2.map(y=>(x._1,y._1,y._2/x._2.map(_._2).sum)))
    val mlbRDD = bookLabelRDD.map(x=>(x._2,(x._1,x._3))).groupByKey().
      flatMap(x=>x._2.map(y=>(x._1,y._1,y._2/x._2.map(_._2).sum)))

    val initRDD = userBooksRDD.map(x=>(x._1,(x._2,x._3))).groupByKey().
      flatMap(x=>x._2.map(y=>(x._1,y._1,y._2/x._2.map(_._2).sum)))

    val startTime = new Date()
    val rankResult = useStringCoordinateMatrixCompute(sc,initRDD,mblRDD,mlpRDD,mplRDD,mlbRDD)
//    val rankResult = useBlockMatrixCompute(sc,initRDD,mblRDD,mlpRDD,mplRDD,mlbRDD)
    // Force evaluation BEFORE sampling the stop time: RDD transformations are
    // lazy, so the original timed only graph construction — the actual compute
    // ran later, outside the measured window. collect() also replaces the
    // long-deprecated RDD.toArray.
    val collected = rankResult.collect()
    val stopTime = new Date()
    collected.foreach(println)
    sc.stop()

    println(stopTime.getTime - startTime.getTime)
  }
}
