package spark.example

import org.apache.spark._
import SparkContext._

object Test_groupByKey {
    /**
     * Spark job: reads tab-separated records (userid, item, score) from args(0),
     * keeps rows whose score is greater than 0.001, groups the surviving items
     * by user id, and writes one "userid<TAB>item1,item2,..." line per user
     * to the output path args(1).
     */
    def main(args: Array[String]): Unit = {
        require(args.length >= 2, "usage: Test_groupByKey <input-path> <output-path>")

        val conf = new SparkConf().setAppName("TEST")
        val sc = new SparkContext(conf)
        try {
            val input = sc.textFile(args(0))
            val output = args(1) // already a String; redundant .toString removed

            input
                // Keep rows whose score (field 2) exceeds the threshold.
                // The length guard skips malformed/short rows instead of
                // aborting the whole job with ArrayIndexOutOfBoundsException.
                .filter { line =>
                    val fields = line.split("\t")
                    fields.length > 2 && fields(2).toDouble > 0.001
                }
                // Key by user id (field 0); keep only the item column (field 1) —
                // the score is not used downstream.
                .map { line =>
                    val fields = line.split("\t")
                    (fields(0), fields(1))
                }
                .groupByKey()
                // BUG FIX: the original lambda built the "userid\titems" record
                // but then discarded it — its last expression (the value actually
                // written out) was a "partition nums: ..." debug string, so the
                // saved file contained debug text instead of the grouped data.
                // mkString replaces the manual StringBuilder join; no trailing
                // "\n" because saveAsTextFile writes one record per line.
                .map { case (userid, items) => userid + "\t" + items.mkString(",") }
                .saveAsTextFile(output)

            // Debug info belongs in the driver log, not in the output records.
            println("partition nums: " + input.partitions.size)
        } finally {
            sc.stop() // release cluster resources even if the job fails
        }
    }
}
