import org.apache.spark.{HashPartitioner, SparkConf, SparkContext}

/**
 * Reads whitespace-trimmed integers (one per line) from `src/sortfile`,
 * sorts them ascending, and writes "rank\tvalue" lines (rank starts at 1)
 * to `src/sortResult`.
 *
 * Fixes over the previous revision:
 *  - Numbering now uses `zipWithIndex()` instead of mutating a driver-side
 *    `var` inside a `map` closure. Spark serializes closures to executors,
 *    so each task would increment its *own* copy of the counter — correct
 *    only by accident under `local` with a single partition.
 *  - Lines are trimmed before `toInt`; the old code filtered on the trimmed
 *    string but parsed the raw one, so "  5  " would throw
 *    NumberFormatException.
 *  - The SparkContext is stopped in a `finally` block.
 */
object SortTest {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("TopValues").setMaster("local")
    // Small fixed memory for local testing; value kept from the original.
    conf.set("spark.testing.memory", "2147480000")
    val sc = new SparkContext(conf)
    try {
      val lines = sc.textFile("src/sortfile", 3)
      val result = lines
        .map(_.trim)
        .filter(_.nonEmpty)
        .map(_.toInt)
        // numPartitions = 1 preserves the original single output file and
        // gives a single global ordering for the rank below.
        .sortBy(identity, ascending = true, numPartitions = 1)
        // zipWithIndex assigns a stable 0-based index respecting partition
        // order — cluster-safe, unlike a mutable driver-side counter.
        .zipWithIndex()
        .map { case (value, idx) => s"${idx + 1}\t$value" }
      result.saveAsTextFile("src/sortResult")
    } finally {
      sc.stop()
    }
  }
}
