package com.datamining.recommend

import breeze.numerics._
import org.apache.spark.rdd.RDD

/**
  * Created by Administrator on 2016/11/3.
  */
class SimilarityAlgorithm {

  /**
    * Computes item-item co-occurrence similarity from user-item interactions.
    *
    * For items A and B the similarity is the cosine-style co-occurrence score:
    *   w(A, B) = N(A ∩ B) / sqrt(N(A) * N(B))
    * where N(A ∩ B) is the number of users who interacted with both A and B,
    * and N(A), N(B) are the per-item interaction counts.
    *
    * @param user_item_rdd RDD of (user_id, item_id) interaction pairs
    * @return RDD of (itemA, itemB, similarity) for all distinct item pairs,
    *         sorted by similarity in descending order
    */
  def itemSimilarity(user_item_rdd: RDD[(String, String)]): RDD[(String, String, Double)] = {

    // Cache: the RDD feeds both sides of the self-join below.
    // (No sortByKey here — join shuffles by key anyway, so a prior sort is a
    // wasted extra shuffle. NOTE(review): this cache is never unpersisted;
    // callers may want to unpersist once the result is materialized.)
    val userItems = user_item_rdd.cache()

    // 1. Self-join on user_id: every pair of items seen by the same user
    //    (including A == B) becomes ((itemA, itemB), 1).
    val itemPairs = userItems.join(userItems).map { case (_, pair) => (pair, 1) }

    // 2. Co-occurrence frequency of each (itemA, itemB) pair.
    val pairCounts = itemPairs.reduceByKey(_ + _)

    // 3. Diagonal entries (A, A): the occurrence count N(A) of each item.
    val diagonal = pairCounts
      .filter { case ((a, b), _) => a == b }
      .map { case ((a, _), count) => (a, count) }

    // 4. Off-diagonal entries: co-occurrence counts of distinct item pairs.
    val offDiagonal = pairCounts.filter { case ((a, b), _) => a != b }

    // 5a. Attach N(A) to each pair, re-key by B to attach N(B) next.
    val withCountA = offDiagonal
      .map { case ((a, b), cooc) => (a, (b, cooc)) }
      .join(diagonal)
      .map { case (a, ((b, cooc), countA)) => (b, (a, cooc, countA)) }

    // 5b. Attach N(B) and compute the similarity. Counts are widened to
    //     Double before multiplying to avoid Int overflow for popular items.
    val similarities = withCountA
      .join(diagonal)
      .map { case (b, ((a, cooc, countA), countB)) =>
        (a, b, cooc / math.sqrt(countA.toDouble * countB.toDouble))
      }

    // Highest similarity first.
    similarities.sortBy(_._3, ascending = false)
  }
}
