"""RDD data output: converting an RDD into plain Python objects.

Demonstrates the four basic RDD "action" operations — collect(), reduce(),
take() and count() — on a small local Spark cluster.
"""

from pyspark import SparkConf, SparkContext
# Tell Spark which Python interpreter the worker processes should use.
import os
# Raw string avoids the fragile mixed escaping of the original
# ('D:\\...\Python39\\...') while producing the exact same path value.
os.environ['PYSPARK_PYTHON'] = r'D:\yfxdeve\python\Python39\python.exe'

conf = SparkConf().setMaster("local[*]").setAppName("test_park")
sc = SparkContext(conf=conf)

try:
    # Build a small RDD to demonstrate the output actions.
    rdd1 = sc.parallelize([1, 2, 3, 4, 5])

    # collect(): materialize the entire RDD as a Python list on the driver.
    rdd_list = rdd1.collect()
    print(rdd_list, type(rdd_list))  # [1, 2, 3, 4, 5] <class 'list'>

    # reduce(): pairwise aggregation of the RDD's elements.
    num = rdd1.reduce(lambda a, b: a + b)
    print(num, type(num))  # 15 <class 'int'>

    # take(): fetch the first N elements of the RDD.
    take_list = rdd1.take(3)
    print(take_list)  # [1, 2, 3]

    # count(): number of elements in the RDD.
    count_num = rdd1.count()
    print(count_num)  # 5
finally:
    # Always release the SparkContext, even if an action above raises.
    sc.stop()