package org.spark.core.action.java;

import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function;
import org.spark.sparkUtil.SparkJavaContextUtil;

import java.util.Arrays;
import java.util.List;

/**
 * Demonstrates the Spark {@code count} action: builds a small RDD of integers
 * and prints how many elements it contains.
 * Datetime:    2020/5/28   19:18
 * Author:   某人的目光
 */
public class Count {
    /**
     * Entry point: creates a local Spark context, parallelizes the integers
     * 1..10 into an RDD, and prints the element count (10) to stdout.
     *
     * @param args unused command-line arguments
     */
    public static void main(String[] args) {
        // try-with-resources: JavaSparkContext is Closeable, so the context is
        // shut down even if a Spark action throws (the original only closed it
        // on the happy path).
        try (JavaSparkContext sc = SparkJavaContextUtil.getSparkStart("Count")) {
            // Build an RDD from a fixed list of the integers 1 through 10.
            List<Integer> numberList = Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9, 10);
            JavaRDD<Integer> numbers = sc.parallelize(numberList);

            // count() is an action that returns the number of elements in the
            // RDD. (NOTE: the original comment claimed the numbers would be
            // summed — the code counts them; it does not add them up.)
            long count = numbers.count();
            System.out.println(count);
        }
    }
}
