from pyspark import SparkConf, SparkContext
import os

if __name__ == '__main__':
    os.environ["PYSPARK_PYTHON"] = "D:/Python/Python310/python.exe"
    os.environ["HADOOP_HOME"] = "D:/Python/hadoop-3.0.0"
    conf = SparkConf().setMaster("local[*]").setAppName("create rdd")
    # spark.default.parallelism must be set on the conf *before* the
    # SparkContext is created; setting it afterwards has no effect.
    # conf.set("spark.default.parallelism", "1")
    sc = SparkContext(conf=conf)
    # numSlices explicitly fixes the number of partitions for this RDD.
    rdd = sc.parallelize([1, 2, 3, 4, 5, 6, 7, 8], numSlices=1)
    # saveAsTextFile writes one part-* file per partition; the output
    # directory must not already exist, or Spark raises an error.
    rdd.saveAsTextFile("D:/outfile")
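
    # Optional sanity check (a minimal sketch using the same rdd):
    # getNumPartitions() reports the partition count, and glom() groups
    # the elements by partition so the layout can be inspected directly.
    print(rdd.getNumPartitions())   # -> 1
    print(rdd.glom().collect())     # -> [[1, 2, 3, 4, 5, 6, 7, 8]]

    # Release the context when done.
    sc.stop()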
