package com.demo.spark.rdd;

import org.apache.commons.lang.StringUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.SparkContext;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function;

import java.util.Arrays;

public class RddMap {

    /**
     * Demonstrates the RDD {@code map} transformation: squares each element
     * of a small local integer RDD and prints the results as a
     * comma-separated string.
     *
     * @param args unused command-line arguments
     */
    public static void main(String[] args) {

        SparkConf config = new SparkConf().setAppName("rdd map").setMaster("local");
        // try-with-resources: JavaSparkContext implements Closeable, so the
        // context is stopped even if the job throws (the original leaked it).
        try (JavaSparkContext context = new JavaSparkContext(config)) {
            // Parameterized RDD types replace the original raw JavaRDD,
            // restoring compile-time type safety (no unchecked warnings).
            JavaRDD<Integer> rdd = context.parallelize(Arrays.asList(1, 3, 5, 7));
            JavaRDD<Integer> result = rdd.map(
                    new Function<Integer, Integer>() {
                        @Override
                        public Integer call(Integer x) throws Exception {
                            return x * x; // square each element
                        }
                    });
            System.out.println(StringUtils.join(result.collect(), ","));
        }
    }
}
