package com.song.sparkstudy;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.PairFlatMapFunction;

import scala.Tuple2;

public class JavaRddTest {
	
	
	
	/**
	 * Entry point: builds a few small in-memory RDDs and runs each demo method
	 * (map, filter, union, flatMapToPair) against them.
	 *
	 * NOTE(review): no master is set on the SparkConf, so this assumes the app
	 * is launched via spark-submit with --master — confirm before running standalone.
	 *
	 * @param args unused command-line arguments
	 */
	public static void main(String[] args) {
		SparkConf conf = new SparkConf().setAppName("rddtest");
		// try-with-resources guarantees the context is closed even if one of
		// the demo actions below throws; the original only closed it on the
		// happy path.
		try (JavaSparkContext context = new JavaSparkContext(conf)) {
			JavaRDD<Integer> rdd = context.parallelize(Arrays.asList(1, 2, 3, 4, 5));

			List<String> list = new ArrayList<>();
			list.add("a,b,c,d,e");
			list.add("1,2,3,4,5");
			JavaRDD<String> rddString1 = context.parallelize(list);

			List<String> list2 = new ArrayList<>();
			list2.add("1,2,3,4,5");
			list2.add("aa,bb,cc,dd,ee");
			list2.add("11,22,33,44,55");
			JavaRDD<String> rddString2 = context.parallelize(list2);

			rddmap(rdd);
			rddmap2(rdd);
			rddfilter(rdd);
			rddunion(rddString1, rddString2);
			maptopair(rddString1);
		}
	}

	/**
	 * Demonstrates flatMapToPair: splits each line on "," and emits one
	 * (field0, field1) pair per line, then prints the collected pairs.
	 *
	 * @param rddString1 RDD of comma-separated lines
	 */
	private static void maptopair(JavaRDD<String> rddString1) {

		JavaPairRDD<String, String> pairrdd = rddString1.flatMapToPair(new PairFlatMapFunction<String, String, String>() {

			@Override
			public Iterable<Tuple2<String, String>> call(String str) throws Exception {
				String[] temp = str.split(",");
				// Program to the List interface; parameterized, not raw.
				List<Tuple2<String, String>> pairs = new ArrayList<Tuple2<String, String>>();
				// Guard: a line with fewer than two fields would otherwise throw
				// ArrayIndexOutOfBoundsException on temp[1]; flatMap lets us
				// simply emit nothing for such lines.
				if (temp.length >= 2) {
					pairs.add(new Tuple2<String, String>(temp[0], temp[1]));
				}
				return pairs;
			}
		});
		// Parameterized Tuple2 instead of the raw type (no unchecked access).
		for (Tuple2<String, String> tuple : pairrdd.collect()) {
			System.out.println("pairrdd------" + tuple._1 + "-----" + tuple._2);
		}
	}

	/**
	 * Demonstrates union: concatenates two string RDDs and prints every
	 * element of the combined result.
	 *
	 * @param rddString1 first RDD of strings
	 * @param rddString2 second RDD of strings
	 */
	private static void rddunion(JavaRDD<String> rddString1, JavaRDD<String> rddString2) {
		JavaRDD<String> combined = rddString1.union(rddString2);
		List<String> collected = combined.collect();
		for (String line : collected) {
			System.out.println("  union str is " + line);
		}
	}

	/**
	 * Demonstrates filter: keeps only the even integers from the input RDD
	 * and prints each surviving value.
	 *
	 * @param rdd RDD of integers to filter
	 */
	private static void rddfilter(JavaRDD<Integer> rdd) {
		// Spark's Function is a functional interface, so a lambda replaces
		// the anonymous-class form with identical behavior.
		JavaRDD<Integer> evens = rdd.filter(v1 -> v1 % 2 == 0);

		for (Integer value : evens.collect()) {
			System.out.println("rdd filter is " + value);
		}
	}

	/**
	 * Demonstrates map to a pair type: maps each integer v to the tuple
	 * (v, v*3) and prints the collected tuples.
	 *
	 * @param rdd RDD of integers to transform
	 */
	private static void rddmap2(JavaRDD<Integer> rdd) {
		// Parameterize Tuple2 as <Integer, Integer>: the original used the raw
		// type, losing type safety on _1/_2 access (unchecked warnings).
		JavaRDD<Tuple2<Integer, Integer>> maprdds = rdd.map(new Function<Integer, Tuple2<Integer, Integer>>() {

			@Override
			public Tuple2<Integer, Integer> call(Integer v1) throws Exception {
				return new Tuple2<Integer, Integer>(v1, v1 * 3);
			}
		});

		for (Tuple2<Integer, Integer> tuple : maprdds.collect()) {
			System.out.println("map2------" + tuple._1 + "-----" + tuple._2);
		}
	}

	/**
	 * Demonstrates map: doubles every integer in the input RDD and prints
	 * each result, followed by a blank line.
	 *
	 * @param rdd RDD of integers to transform
	 */
	private static void rddmap(JavaRDD<Integer> rdd) {
		// Lambda form of the original anonymous Function<Integer, Integer>;
		// same transformation (v -> v * 2), different style.
		JavaRDD<Integer> doubled = rdd.map(v1 -> v1 * 2);

		List<Integer> results = doubled.collect();
		for (Integer value : results) {
			System.out.println(" rdd map is " + value);
		}
		System.out.println();
	}
	
	
	
	
}
