# Demo of RDD output (action) operators: collect, reduce, take, count

from pyspark import SparkConf,SparkContext
import os
os.environ["PYSPARK_PYTHON"] = "D:/Soft/Python/Python310/python.exe"

if __name__ == '__main__':
    # Build a Spark context running locally on all available cores.
    spark_conf = SparkConf().setMaster("local[*]").setAppName("test")
    spark_ctx = SparkContext(conf=spark_conf)

    # Distribute a small in-memory dataset as an RDD.
    numbers = spark_ctx.parallelize([1, 2, 3, 4, 5])

    # Action operators — each triggers execution and returns a value to the driver.
    print(numbers.collect())                    # all elements as a Python list
    print(numbers.reduce(lambda x, y: x + y))   # fold elements into a single sum
    print(numbers.take(3))                      # first 3 elements (similar to SQL LIMIT)
    print(numbers.count())                      # total number of elements

    # Release cluster resources.
    spark_ctx.stop()