package rdd.operate;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function;

import java.util.Arrays;
import java.util.List;

/**
 * Demonstrates the RDD {@code map} transformation and how partitioning affects
 * the order of side-effect output (the {@code println} inside the mapper).
 */
public class Spark39_Operate_map {
    public static void main(String[] args) {
        final List<Integer> nums = Arrays.asList(1, 2, 3, 4);
        final SparkConf conf = new SparkConf();
        // With a single-threaded local master the interleaved "@" output would be
        // exactly 1234 or 3412, because one thread can only print one partition's
        // data at a time. With local[2], the two partitions print concurrently.
        conf.setMaster("local[2]");
        conf.setAppName("spark");
        // JavaSparkContext is Closeable (close() calls stop()); try-with-resources
        // guarantees the context is shut down even if the job throws.
        // The original leaked the context by never stopping it.
        try (final JavaSparkContext jsc = new JavaSparkContext(conf)) {
            // Distribute the list across exactly 2 partitions: [1,2] and [3,4].
            final JavaRDD<Integer> rdd = jsc.parallelize(nums, 2);
            // map() runs the lambda on the executors, so the "@" prints may
            // interleave across partitions in nondeterministic order.
            final JavaRDD<Integer> newrdd = rdd.map(
                    num -> {
                        System.out.println("@" + num);
                        return num;
                    });
            // collect() gathers results to the driver in partition order,
            // so this final output is always 1 2 3 4.
            newrdd.collect().forEach(System.out::println);
        }
    }
}
