package org.groupg.practice;

import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;

import java.util.List;


/**
 * Minimal Spark RDD demo: parallelizes a small list of integers on a local
 * master, filters the even values, and logs partition count and results.
 *
 * <p>Runs entirely in-process ({@code local[*]}) with the Spark web UI disabled,
 * so it needs no cluster. NOTE(review): results are logged at DEBUG while the
 * Spark log level is set to ERROR — confirm the hutool logger is configured
 * independently of Spark's, otherwise raise these to info.
 */
public class SparkRDDDemo01 {
    static final Log log = LogFactory.get();

    public static void main(String[] args) {
        SparkConf conf = new SparkConf();
        conf.setMaster("local[*]");          // run in-process, using all local cores
        conf.setAppName("SparkRDDDemo01");
        conf.set("spark.ui.enabled", "false"); // no web UI needed for a throwaway demo

        // JavaSparkContext is AutoCloseable; try-with-resources guarantees stop().
        try (JavaSparkContext sc = new JavaSparkContext(conf)) {
            sc.setLogLevel("ERROR"); // silence Spark's own chatty INFO logging
            JavaRDD<Integer> lines =
                    sc.parallelize(List.of(1, 2, 3, 4, 5, 6, 7, 8, 9, 0)).setName("Text");

            log.debug("默认分区数量是：{}", lines.getNumPartitions());

            // filter() is a lazy transformation — without an action it never runs.
            JavaRDD<Integer> evens = lines.filter(t -> t % 2 == 0);

            // collect() is the action that actually materializes the filtered RDD.
            log.debug("Even elements: {}", evens.collect());
        }
    }
}
