package edu.hhu.innerac.sparkm;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function;

public class App 
{
    public static void main( String[] args ){
    	System.setProperty("hadoop.home.dir", "F:/hadoop-common-2.2.0-bin-master");
    	SparkConf sparkConf = new SparkConf().setAppName("JavaSparkPi").setMaster("spark://master:7077");
//    	SparkConf sparkConf = new SparkConf().setAppName("JavaSparkPi").setMaster("local");
    	JavaSparkContext jsc = new JavaSparkContext(sparkConf);
    	jsc.addJar("/home/cloud/sparkm.jar");
    	
    	List<Integer> nums = new ArrayList<Integer>();
    	nums.add(1);
    	nums.add(2);
    	nums.add(3);
    	nums.add(4);
    	nums.add(5);
    	
    	JavaRDD<Integer> numdds = jsc.parallelize(nums);
    	
    	System.out.println(numdds.count());
    	JavaRDD<Integer> numddes = numdds.map(new Function<Integer, Integer>() {

			public Integer call(Integer v1) throws Exception {
				return v1*v1;
			}
		});
    	System.out.println(numddes.collect());
    	jsc.stop();
    }
}
