package com.cw.recommend.common

import com.cw.recommend.common.constant.MAX_SIM_ITEM_NUM
import com.cw.recommend.common.model.{ProductSimItem, ProductSimList}
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, SparkSession}
import org.jblas.DoubleMatrix

package object feature {


  /**
   * Cosine similarity between two equal-length feature vectors.
   *
   * Computed in a single pass without intermediate allocations (the previous
   * jblas-based version built two DoubleMatrix objects per call, which is
   * wasteful when invoked once per candidate pair on the executors).
   *
   * @param a first feature vector
   * @param b second feature vector; must have the same length as `a`
   * @return value in [-1, 1]; 0.0 when either vector has zero norm
   *         (the jblas version returned NaN in that case — NaN and 0.0 are
   *         filtered identically by the downstream `sim > 0` check)
   * @throws IllegalArgumentException if the vectors differ in length
   */
  def cosineSim(a: Array[Double], b: Array[Double]): Double = {
    require(a.length == b.length, s"vector length mismatch: ${a.length} vs ${b.length}")
    var dot = 0.0
    var normA = 0.0
    var normB = 0.0
    var i = 0
    while (i < a.length) {
      dot += a(i) * b(i)
      normA += a(i) * a(i)
      normB += b(i) * b(i)
      i += 1
    }
    val denom = math.sqrt(normA) * math.sqrt(normB)
    // Guard against zero vectors instead of silently producing NaN.
    if (denom == 0.0) 0.0 else dot / denom
  }

  type Feature = (Int, Array[Double])
  def computeFeatureSimilarity(spark: SparkSession, features: RDD[(Feature, Feature)]): DataFrame = {
    import spark.implicits._
    features.map { case (a, b) =>
        a._1 -> ProductSimItem(b._1, cosineSim(a._2, b._2))
      }
      .filter(_._2.sim > 0)
      .groupByKey
      .map { case (u, xs) =>
        ProductSimList(u, xs.toSeq.sortWith(_.sim > _.sim).take(MAX_SIM_ITEM_NUM))
      }
      .toDF
  }







}
