package ALS

import java.io.File
import java.util

import org.apache.commons.io.FileUtils

object perProc {

  /** Reads `topics/topics.csv`, sums the third CSV column per first-column key,
    * and prints every `key=value` pair in ascending order of the summed value.
    *
    * CSV rows are expected to have at least 3 comma-separated fields with an
    * integer in the third position (a header row would throw
    * NumberFormatException — same as the original behavior).
    */
  def main(args: Array[String]): Unit = {
    import scala.io.Source

    // Leftover from the (commented-out) Spark setup; kept because removing a
    // JVM-wide system property change could affect other code in this process.
    System.setProperty("hadoop.home.dir", new File("").getAbsolutePath)

    val inputFile1: String = "topics/topics.csv"

    val source = Source.fromFile(inputFile1)
    try {
      // Sum column 2 (the count) per column 0 (the key).
      // withDefaultValue(0) replaces the computeIfAbsent/put dance of the
      // original mutable-HashMap version.
      val counts = source
        .getLines()
        .map(_.split(","))
        .foldLeft(Map.empty[String, Int].withDefaultValue(0)) { (acc, cols) =>
          acc.updated(cols(0), acc(cols(0)) + cols(2).toInt)
        }

      // sortBy uses Ordering[Int] — fixes the overflow-prone subtraction
      // comparator (e.getValue - a.getValue) of the original.
      counts.toSeq
        .sortBy(_._2)
        .foreach { case (key, total) => println(s"$key=$total") }
    } finally {
      source.close() // FileUtils.readLines had no handle to leak; Source does.
    }
  }
}
