package org.eking.bigdata.spark;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;

import org.apache.hadoop.mapreduce.Partitioner;
import org.apache.spark.api.java.JavaDoubleRDD;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.Function2;
import org.apache.spark.api.java.function.PairFunction;

import com.esotericsoftware.kryo.Kryo;

import org.apache.spark.serializer.KryoRegistrator;
import org.apache.spark.sql.Row;
import org.apache.spark.SparkConf;

import scala.Tuple2;
import scala.collection.immutable.List;

/**
 * Scratchpad for exercising Spark Java RDD transformations (map, filter,
 * flatMap, mapPartitions, mapPartitionsWithIndex, union, intersection,
 * subtract/repartition). All experiments are kept commented out; uncomment
 * one section at a time to run it. Only the context setup/teardown executes.
 */
public class SparkFunTest {

	/**
	 * Creates a {@link JavaSparkContext} against a standalone master and
	 * guarantees it is closed. The commented sections below are individual
	 * transformation experiments retained for reference.
	 *
	 * @param args unused command-line arguments
	 */
	public static void main(String[] args) {
		SparkConf conf = new SparkConf().setAppName("SparkTest");
		// NOTE(review): the conventional Spark standalone master port is 7077 —
		// confirm that 7702 is intentional for this cluster.
		conf.setMaster("spark://127.0.0.1:7702");

		// try-with-resources ensures the context is closed even if an
		// uncommented experiment throws (JavaSparkContext implements Closeable).
		try (JavaSparkContext sc = new JavaSparkContext(conf)) {

			/* map: parse each string, add 1, render back to a string.
			JavaRDD<String> distFile = sc.parallelize(Arrays.asList("1", "2", "3"));
			distFile.cache();
			System.out.println(distFile.collect());
			JavaRDD<String> mapRdd = distFile.map(new Function<String, String>() {
				@Override
				public String call(String s) throws Exception {
					Integer i = Integer.parseInt(s);
					i += 1;
					return i.toString();
				}
			});
			*/

			/* filter: keep only odd values.
			JavaRDD<String> distFile = sc.parallelize(Arrays.asList("1", "2", "3"));
			distFile.cache();
			System.out.println(distFile.collect());
			JavaRDD<String> filtered = distFile.filter(new Function<String, Boolean>() {
				@Override
				public Boolean call(String s) throws Exception {
					return Integer.parseInt(s) % 2 != 0;
				}
			});
			*/

			/* flatMap: split each line on spaces into individual tokens.
			JavaRDD<String> distFile = sc.parallelize(Arrays.asList("1 3", "2 2", "3 4"));
			System.out.println(distFile.collect());
			JavaRDD<String> tokens = distFile.flatMap(new FlatMapFunction<String, String>() {
				@Override
				public Iterator<String> call(String s) throws Exception {
					return Arrays.asList(s.split(" ")).iterator();
				}
			});
			*/

			/* mapPartitions & mapToPair: pair each value with 1; offset each
			 * element by its position within its partition.
			JavaRDD<Integer> distFile = sc.parallelize(Arrays.asList(1, 1, 3, 4, 1, 6, 7, 8, 9), 4);
			System.out.println(distFile.collect());

			JavaPairRDD<Integer, Integer> pairs = distFile.mapToPair(
					new PairFunction<Integer, Integer, Integer>() {
				@Override
				public Tuple2<Integer, Integer> call(Integer parm) throws Exception {
					return new Tuple2<>(parm, 1);
				}
			});

			JavaRDD<Integer> shifted = distFile.mapPartitions(
					new FlatMapFunction<Iterator<Integer>, Integer>() {
				@Override
				public Iterator<Integer> call(Iterator<Integer> arg) throws Exception {
					ArrayList<Integer> ret = new ArrayList<>();
					int i = 0;
					while (arg.hasNext()) {
						ret.add(arg.next() + i);
						i++;
					}
					return ret.iterator();
				}
			});
			*/

			/* mapPartitionsWithIndex: add the partition index to each element.
			JavaRDD<Integer> distFile = sc.parallelize(Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9), 3);
			System.out.println(distFile.collect());
			JavaRDD<Integer> indexed = distFile.mapPartitionsWithIndex(
					new Function2<Integer, Iterator<Integer>, Iterator<Integer>>() {
				@Override
				public Iterator<Integer> call(Integer idx, Iterator<Integer> parm)
						throws Exception {
					ArrayList<Integer> ret = new ArrayList<>();
					while (parm.hasNext()) {
						ret.add(parm.next() + idx);
					}
					return ret.iterator();
				}
			}, false);
			System.out.println(indexed.collect());
			*/

			/* union: concatenate two RDDs (duplicates retained).
			JavaRDD<Integer> distFile1 = sc.parallelize(Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9));
			JavaRDD<Integer> distFile2 = sc.parallelize(Arrays.asList(10, 11, 12, 13, 14, 15, 17, 18, 19));
			JavaRDD<Integer> distFile3 = distFile1.union(distFile2);
			System.out.println(distFile3.collect());
			*/

			/* intersection: elements present in both RDDs.
			JavaRDD<Integer> distFile1 = sc.parallelize(Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9));
			JavaRDD<Integer> distFile2 = sc.parallelize(Arrays.asList(1, 11, 2, 13, 14, 15, 17, 18, 9));
			JavaRDD<Integer> distFile3 = distFile1.intersection(distFile2);
			System.out.println(distFile3.collect());
			*/

			/* subtract / repartition (note: both return new RDDs; the results
			 * were not captured in the original experiment).
			JavaRDD<Integer> distFile = sc.parallelize(Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9));
			distFile.subtract();
			distFile.repartition(2);
			*/

			// MLlib normalizer experiment:
			// Vector vec = Vectors.dense(new double[]{1.0, 100, 500, 3.0});
			// Normalizer normal = new Normalizer();
			// Vector ret = normal.transform(vec);
			// System.out.println(ret.toString());
		}
	}

}
