from pyspark import SparkConf, SparkContext
import os

# Point PySpark's worker processes at the local Python interpreter.
# NOTE(review): "D://Python/..." has a doubled slash — Windows tolerates it,
# but "D:/Python/Python3107/python.exe" is the conventional form; confirm the
# path is correct for this machine.
os.environ['PYSPARK_PYTHON'] = r"D://Python/Python3107/python.exe"

# Run Spark locally using all available cores.
conf = SparkConf().setMaster("local[*]").setAppName("test_spark")
sc = SparkContext(conf=conf)

rdd = sc.parallelize([1, 2, 3, 4, 5])

# collect action: pull the entire RDD back to the driver as a list.
# Call it once and reuse the result — each collect() triggers a full Spark job.
collected = rdd.collect()
print(collected)
print(type(collected))

# reduce action: fold the elements pairwise; here it sums them.
num = rdd.reduce(lambda x, y: x + y)
print(num)
print(type(num))

# take action: return the first N elements of the RDD as a list.
take_list = rdd.take(3)
print(take_list)
print(type(take_list))

# count action: number of elements in the RDD, returned as an int.
count = rdd.count()
print(count)
print(type(count))

# first action: the first element of the RDD, returned as a plain object.
first = rdd.first()
print(first)
print(type(first))

# Stop the SparkContext so the local cluster's resources are released.
sc.stop()