package cn.lecosa.spark
import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD.rddToPairRDDFunctions
import org.apache.spark.HashPartitioner

object Sort {

  /**
   * Reads whitespace-padded integers (one per line) from HDFS, sorts them
   * ascending, and prints each value on the driver prefixed with its
   * 1-based rank.
   *
   * NOTE(review): assumes every non-blank line parses as an Int — a bad
   * line will fail the job with NumberFormatException; confirm input format.
   */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName(this.getClass.getName).setMaster("local[*]")
    val sc = new SparkContext(conf)
    try {
      val lines = sc.textFile("hdfs://park01:9000/home/sort", 3)

      // Keep non-blank lines; pair each number with a dummy value so the
      // pair-RDD sortByKey can be applied.
      val pairs = lines.filter(_.trim.nonEmpty).map(line => (line.trim.toInt, ""))

      // sortByKey installs its own RangePartitioner, so the original
      // partitionBy(new HashPartitioner(2)) right before it was wasted work
      // (its partitioning was immediately discarded); sort directly.
      val sorted = pairs.sortByKey()

      // BUG FIX: the original closed over a driver-side `var idx` that was
      // never incremented — every record was emitted with rank 1. Even if it
      // had been incremented, mutating a captured driver variable inside an
      // RDD closure does not propagate across executors. zipWithIndex()
      // assigns a correct global 0-based position in sorted order.
      val ranked = sorted.zipWithIndex().map { case ((num, _), i) => (i + 1, num) }

      // collect() so output is printed on the driver; a bare rdd.foreach(println)
      // prints on executor stdout and is lost in cluster mode.
      ranked.collect().foreach { case (rank, num) => println(rank + "\t" + num) }
      // ranked.saveAsTextFile("hdfs://park01:9000/home/sort1")
    } finally {
      // Always release the SparkContext, even if the job fails mid-flight.
      sc.stop()
    }
  }
}