package spark.udrank

import org.apache.spark.rdd.RDD

/**
  * A sparse coordinate matrix keyed by String row/column names, defined for
  * the recommender system's rank algorithm.
  *
  * Each element of `value` is a (rowName, colName, cellValue) triple. Entries
  * absent from the RDD are treated as implicit zeros by the operations below.
  *
  * @param value the matrix cells as (row, column, value) triples
  */
class StringCoordinateMatrix(val value: RDD[(String, String, Double)]) extends Serializable {

  /**
    * Matrix multiplication: `this * other`.
    *
    * This matrix's columns are joined against `other`'s rows (the shared inner
    * dimension), partial products are summed per (row, column) cell, and the
    * result is returned as a new matrix.
    *
    * @param other the right-hand operand
    * @param topN  when > 0, keep only the `topN` largest cells of each result
    *              row (useful for pruning recommendation candidates); when 0
    *              (the default) the full product is returned
    * @return the product matrix
    */
  def *(other: StringCoordinateMatrix, topN: Int = 0) = {
    // Key this matrix by column and `other` by row so the join pairs up
    // entries that share the inner dimension.
    val byColumn = this.value.map { case (row, col, v) => (col, (row, v)) }
    val byRow = other.value.map { case (row, col, v) => (row, (col, v)) }
    val product = byColumn.join(byRow).map {
      case (_, ((rowName, leftValue), (colName, rightValue))) =>
        ((rowName, colName), leftValue * rightValue)
    }.reduceByKey(_ + _).map { case ((row, col), v) => (row, col, v) }
    if (topN > 0)
      new StringCoordinateMatrix(
        // Per result row, keep only the topN cells by descending value.
        product.groupBy(_._1).flatMap(_._2.toArray.sortWith(_._3 > _._3).take(topN))
      )
    else
      new StringCoordinateMatrix(product)
  }

  /**
    * Scalar multiplication: every cell value is multiplied by `a`.
    *
    * @param a the scalar factor
    * @return a new matrix with each cell scaled by `a`
    */
  def *(a: Double) =
    new StringCoordinateMatrix(this.value.map { case (row, col, v) => (row, col, v * a) })

  /**
    * Element-wise addition: `this + other`.
    *
    * Uses a full outer join so that a cell present in only one operand is
    * treated as 0.0 on the missing side rather than being dropped.
    *
    * @param other the matrix to add
    * @return the element-wise sum
    */
  def +(other: StringCoordinateMatrix) = {
    val left = this.value.map { case (row, col, v) => ((row, col), v) }
    val right = other.value.map { case (row, col, v) => ((row, col), v) }
    val summed = left.fullOuterJoin(right).map {
      case ((row, col), (lv, rv)) => (row, col, lv.getOrElse(0.0) + rv.getOrElse(0.0))
    }
    new StringCoordinateMatrix(summed)
  }

  /**
    * Transpose of this matrix: every (i, j, value) cell becomes (j, i, value).
    *
    * Fix: the `other` parameter was never read by the implementation. It is
    * retained with a default value only so existing call sites that pass an
    * argument keep compiling; new code should call `transposes()` with no
    * argument. The parameter is ignored.
    *
    * @param other ignored (kept for source compatibility)
    * @return the transposed matrix
    */
  def transposes(other: StringCoordinateMatrix = null) =
    new StringCoordinateMatrix(this.value.map { case (i, j, v) => (j, i, v) })
}
