package com.ruby.bigtable.spark;

import java.util.Arrays;
import java.util.List;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.PairFunction;
import org.apache.spark.api.java.function.VoidFunction;

import scala.Tuple2;

public class NameScoreJoin {

	/**
	 * Joins student names with their scores by student id, orders the joined
	 * records by score in descending order, and prints each record to stdout
	 * as {@code "id,name,score"}.
	 *
	 * <p>Expected input files on HDFS (space-separated, one record per line):
	 * <ul>
	 *   <li>{@code student.input}: {@code "<studentId> <studentName>"}</li>
	 *   <li>{@code score.input}:   {@code "<studentId> <score>"}</li>
	 * </ul>
	 *
	 * @param args unused
	 */
	public static void main(String[] args) {
		SparkConf conf = new SparkConf().setAppName("score name join").setMaster("local");

		// try-with-resources: the original leaked the context (hidden behind
		// @SuppressWarnings("resource")); JavaSparkContext implements Closeable.
		try (JavaSparkContext sc = new JavaSparkContext(conf)) {

			// (studentId, studentName) pairs parsed from student.input.
			// Split each line once instead of twice as the original did.
			JavaPairRDD<String, String> studentList = sc
					.textFile("hdfs://hadooptest:9000/user/hadoop/raw/TopN/student.input")
					.mapToPair(line -> {
						String[] fields = line.split(" ");
						return new Tuple2<>(fields[0], fields[1]);
					});

			// (studentId, score) pairs parsed from score.input.
			JavaPairRDD<String, Integer> scoreList = sc
					.textFile("hdfs://hadooptest:9000/user/hadoop/raw/TopN/score.input")
					.mapToPair(line -> {
						String[] fields = line.split(" ");
						return new Tuple2<>(fields[0], Integer.valueOf(fields[1]));
					});

			// Inner join on studentId: (id, (name, score)).
			JavaPairRDD<String, Tuple2<String, Integer>> joinResult = studentList.join(scoreList);

			// Re-key by score so sortByKey can order the records; the value
			// carries (id, name) along.
			JavaPairRDD<Integer, Tuple2<String, String>> scoreOrderList = joinResult
					.mapToPair(t -> new Tuple2<>(t._2._2, new Tuple2<>(t._1, t._2._1)))
					.sortByKey(false); // false => descending by score

			// Restore the (id, (name, score)) shape for output.
			JavaPairRDD<String, Tuple2<String, Integer>> resultRdd = scoreOrderList
					.mapToPair(t -> new Tuple2<>(t._2._1, new Tuple2<>(t._2._2, t._1)));

			//resultRdd.saveAsTextFile("hdfs://hadooptest:9000/user/hadoop/output/TopN/1805191024");

			// NOTE(review): foreach executes on the executors; with a non-local
			// master the println output lands in executor logs, not the driver
			// console. Fine for local[*] debugging as configured above.
			resultRdd.foreach(t -> System.out.println(t._1 + "," + t._2._1 + "," + t._2._2));
		}
	}

}
