package test.java

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}
import scala.collection.mutable.Buffer

object getScore {
    /**
     * Reads a whitespace-separated score file, sums the numbers on each line,
     * and prints the three largest line sums paired with their 1-based line
     * numbers.
     *
     * Note: the input path is hard-coded; each line must contain only
     * space-separated integers.
     */
    def main(args: Array[String]): Unit = {
        val config: SparkConf = new SparkConf().setMaster("local[*]").setAppName("getScore")

        val sc = new SparkContext(config)

        try {
            val in: RDD[String] = sc.textFile("C:\\Users\\26410\\Desktop\\ans.txt",1)

            val spin: RDD[Array[String]] = in.map(_.split(" "))

            // Use zipWithIndex for line numbering. A driver-side `var` mutated
            // inside an RDD closure does NOT work: the closure is serialized and
            // shipped to each task, so every task mutates its own copy and the
            // driver never sees the updates — line numbers come out wrong (and
            // nondeterministic) on any RDD with more than one partition. It was
            // also off by one, tagging the first line as 2 because the counter
            // was incremented before being paired with the sum.
            val value: RDD[(Int, Int)] = spin.zipWithIndex().map { case (arr, idx) =>
                // .toInt replaces the verbose Integer.valueOf(_).intValue()
                val sumLine: Int = arr.map(_.toInt).sum
                // idx is 0-based; report human-friendly 1-based line numbers
                (sumLine, idx.toInt + 1)
            }

            // Top 3 line sums in descending order, each as (sum, lineNumber)
            println(value.sortBy(_._1, ascending = false).take(3).mkString(" "))
        } finally {
            // Always release Spark resources, even if the job fails
            sc.stop()
        }
    }

}
