package com.catmiao.rdd.instance;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;

import java.util.Arrays;
import java.util.List;

/**
 * @author ChengMiao
 * @title: Spark02_Memory
 * @projectName spark_study
 * @description: Creates an RDD from an in-memory collection using parallelize.
 * @date 2024/7/30 19:54
 */
public class Spark02_Memory {

    public static void main(String[] args) {

        // Configure Spark to run locally with an application name matching this example
        SparkConf sparkConf = new SparkConf();
        sparkConf.setMaster("local");
        sparkConf.setAppName("Spark02_Memory");

        JavaSparkContext javaSparkContext = new JavaSparkContext(sparkConf);
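        // Note: "local" runs Spark with a single worker thread in this JVM; as a sketch of an
        // alternative (not required for this example), sparkConf.setMaster("local[*]") would
        // use all available cores.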


        // Data source: a local in-memory collection
        List<String> list = Arrays.asList("kirito", "asuna", "monster");

        // Distribute the local collection as an RDD
        JavaRDD<String> rdd = javaSparkContext.parallelize(list);
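        // Note: parallelize can also take an explicit slice count to control partitioning,
        // e.g. (illustrative only) javaSparkContext.parallelize(list, 2).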


        // collect() brings all RDD elements back to the driver, so use it only on small data sets
        List<String> collect = rdd.collect();
        collect.forEach(System.out::println);

        javaSparkContext.stop();
    }
}
