package com.imooc.base

import org.apache.spark.{SparkConf, SparkContext}

import scala.collection.mutable.ArrayBuffer
import scala.math._

object cf {

  /** Cap on the number of preferences kept per user, to bound skew from hyper-active users. */
  private val MaxPrefsPerUser = 20

  /**
   * Step 1 of an item-based CF pipeline: read `userid \t itemid \t score`
   * records, cap each user's preference list, regroup by item, L2-normalise
   * each item's score vector, and write `itemid \t userid \t normScore`
   * lines to the output path.
   *
   * @param args args(0) = input path, args(1) = output path
   */
  def main(args: Array[String]): Unit = {
    require(args.length >= 2, "usage: cf <input_path> <output_path>")

    val conf = new SparkConf().setMaster("local[2]").setAppName("CF Spark")
    val sc = new SparkContext(conf)
    try {
      val inputPath = args(0)
      val outputPath = args(1)

      // step 1. normalization: obtain the UI (user-item) matrix
      val userItemScores = sc.textFile(inputPath).map { line =>
        val fields = line.split("\t")
        // expected record layout: userid \t itemid \t score
        (fields(0), (fields(1), fields(2).toDouble))
      }

      // Invert to (itemid, (userid, score)), keeping at most MaxPrefsPerUser
      // preferences per user (the original imperative loop did exactly this).
      val itemUserScores = userItemScores.groupByKey().flatMap {
        case (userid, itemScores) =>
          itemScores.take(MaxPrefsPerUser).map {
            case (itemid, score) => (itemid, (userid, score))
          }
      }

      // L2-normalise each item's score vector.
      val normalized = itemUserScores.groupByKey().flatMap {
        case (itemid, userScores) =>
          val scores = userScores.toArray
          val norm = sqrt(scores.iterator.map(s => pow(s._2, 2)).sum)
          scores.map {
            case (userid, score) =>
              // BUGFIX: the original computed `norm` and threw it away (the
              // closure returned Unit); emit the normalised score instead.
              // Guard against an all-zero vector to avoid NaN from 0/0.
              val normScore = if (norm == 0.0) 0.0 else score / norm
              s"$itemid\t$userid\t$normScore"
          }
      }

      // BUGFIX: the original pipeline ended in a transformation with no
      // action, so nothing ever ran and output_path was never used.
      normalized.saveAsTextFile(outputPath)
    } finally {
      // Always release the SparkContext, even if the job fails.
      sc.stop()
    }
  }
}
