package core.rdd.transform;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function;

import java.util.Arrays;
import java.util.List;

public class Spark01_MAP {

    /**
     * Demonstrates the RDD {@code map} transformation: applying a function to
     * every element of a source RDD A to produce a new RDD B (element-by-element).
     *
     * @param args unused command-line arguments
     */
    public static void main(String[] args) {
        // SparkConf points at the Spark master and names the application.
        SparkConf conf = new SparkConf()
                .setAppName("Spark01_MAP")  // application name
                .setMaster("local[*]");     // replace with your cluster's master URL

        // JavaSparkContext is the main entry point for interacting with the
        // cluster. It implements Closeable, so try-with-resources guarantees
        // shutdown even if an exception is thrown.
        try (JavaSparkContext sc = new JavaSparkContext(conf)) {

            List<Integer> numbers = Arrays.asList(1, 2, 3, 4, 5, 6, 7);
            JavaRDD<Integer> parallelize = sc.parallelize(numbers, 2);

            // Verbose form: anonymous Function<Integer, Integer> implementation.
            JavaRDD<Integer> map = parallelize.map(new Function<Integer, Integer>() {
                @Override
                public Integer call(Integer integer) throws Exception {
                    return integer * 2;
                }
            });

            // Lambda form — equivalent to the anonymous class above.
            // Fixed: the result is JavaRDD<Integer>, not JavaRDD<Object>; the
            // original declaration discarded the element type. Note this RDD is
            // never acted on, so the lambda never actually executes (RDDs are lazy).
            JavaRDD<Integer> map2 = parallelize.map(
                    in -> 2 * in
            );

            // collect() is an action: it triggers evaluation and pulls the
            // doubled values back to the driver, where they are printed.
            map.collect().forEach(System.out::println);
        }
    }
}
