package TestSparkConversionOperator;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function;

import java.util.List;

public class SparkMap {
    /**
     * Demonstrates the {@code map} transformation operator.
     *
     * <p>{@code map} is the most commonly used transformation: it passes each
     * element of the source RDD through a user-supplied function and builds a
     * new RDD from the returned elements (one output element per input
     * element). Like all transformations it is lazy — nothing runs until the
     * {@code collect()} action below triggers the job.
     *
     * @param args command-line arguments (unused)
     */
    public static void main(String[] args) {

        SparkConf sparkConf = new SparkConf().setAppName("SparkTextFile").setMaster("local");

        // JavaSparkContext implements Closeable; try-with-resources guarantees
        // the context (and its backing resources) is stopped even if the job throws.
        try (JavaSparkContext javaSparkContext = new JavaSparkContext(sparkConf)) {

            // Reads every *.txt file under ./data as an RDD of lines.
            JavaRDD<String> rdd = javaSparkContext.textFile("./data/*.txt");

            // map takes a Function<T1, R>: T1 is the element type of the source RDD,
            // R is the element type of the resulting RDD.
            JavaRDD<String> map = rdd.map(new Function<String, String>() {
                @Override
                public String call(String s) throws Exception {
                    // Runs on the executor, once per input line.
                    System.out.println("+++++++++");
                    return s + "--------";
                }
            });

            // collect() is an action: it triggers execution and pulls the
            // transformed elements back to the driver.
            List<String> res = map.collect();
            for (String s : res) {
                System.out.println(s);
            }
        }
    }
}
