package core.rdd.instance;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;

import java.util.Arrays;
import java.util.List;

public class Spark01_MEMORY {
    /**
     * Demo: builds an in-memory RDD from a small list and prints its element count.
     *
     * @param args command-line arguments (unused)
     */
    public static void main(String[] args) {
        /*
         * Build the Spark runtime environment.
         */
        // Remote cluster environment (standalone master).
        SparkConf sparkConf1 = new SparkConf().setAppName("test").setMaster("spark://192.168.111.30:7077");
        // Local environment alternative (unused here); 20 is the number of worker threads allocated.
        SparkConf sparkConf2 = new SparkConf().setAppName("test").setMaster("local[20]");

        final JavaSparkContext javaSparkContext = new JavaSparkContext(sparkConf1);

        // All RDD work happens inside the try so the context is always closed,
        // even if an action throws. (Previously the try block was empty and the
        // actions ran before it, defeating the finally-based cleanup.)
        try {
            List<String> stringList = Arrays.asList("1", "2", "3", "zhansan");

            // Distribute the local collection across the cluster as an RDD.
            JavaRDD<String> parallelize = javaSparkContext.parallelize(stringList);

            // count() is an action: it triggers the actual job execution.
            System.out.println("统计个数:" + parallelize.count());
        } finally {
            javaSparkContext.close();
        }
    }
}
