import org.apache.spark.SparkConf
import org.apache.spark.SparkContext

object scalaPartition {
  /** Reads a comma-separated student file, keys each record by its 4th
   *  column with its 2nd column as the value, repartitions the pairs with
   *  the custom [[MyPartition]] partitioner (2 partitions), and writes the
   *  result as text files.
   *
   *  @param args optional overrides: args(0) = input path, args(1) = output
   *              path. When absent, the original hard-coded local paths are
   *              used, so existing invocations behave identically.
   */
  def main(args: Array[String]): Unit = {
    // Preserve the original paths as defaults; allow overriding from the CLI.
    val inputPath  = if (args.length > 0) args(0) else "file:///home/jkx/student.txt"
    val outputPath = if (args.length > 1) args(1) else "file:///home/jkx/output"

    val conf = new SparkConf().setAppName("wordCount").setMaster("local")
    val sc = new SparkContext(conf)
    try {
      val pairs = sc.textFile(inputPath).map { line =>
        val fields = line.split(",")
        // (4th column, 2nd column) — a line with fewer than 4 fields
        // would throw ArrayIndexOutOfBoundsException at execution time.
        (fields(3), fields(1))
      }
      pairs
        .partitionBy(new MyPartition(2))
        .saveAsTextFile(outputPath)
    } finally {
      // Always release cluster resources, even if the job fails.
      sc.stop()
    }
  }
}
