from pyspark import SparkConf, SparkContext
import os

if __name__ == '__main__':
    # Point PySpark workers at the local Python interpreter;
    # adjust this path to match your own installation.
    os.environ["PYSPARK_PYTHON"] = "D:/Python/Python310/python.exe"
    # Run locally, using all available CPU cores.
    conf = SparkConf().setMaster("local[*]").setAppName("create rdd")
    sc = SparkContext(conf=conf)
    # parallelize distributes a local collection into an RDD.
    # Note: range(1, 10) yields 1 through 9 (the stop value is exclusive).
    rdd = sc.parallelize(range(1, 10))
    print(rdd.reduce(lambda a, b: a + b))  # sum of 1..9 -> 45
    print(rdd.take(5))                     # first five elements: [1, 2, 3, 4, 5]
    print(rdd.count())                     # number of elements: 9
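
    # Stop the SparkContext to release resources when finished.
    sc.stop()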
