from pyspark import SparkConf, SparkContext
import os

if __name__ == '__main__':
    # Point PySpark at the local Python interpreter (adjust this path to your installation)
    os.environ['PYSPARK_PYTHON'] = "D:\\Python\\Python310\\python.exe"
    # Run Spark locally, using as many worker threads as there are CPU cores
    conf = SparkConf().setMaster("local[*]").setAppName("rdd map")
    sc = SparkContext(conf=conf)
    # Build an RDD from a local Python list
    rdd = sc.parallelize([1, 2, 3, 4, 5])
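
    # Side note (an addition, not in the original script): parallelize also accepts
    # an optional numSlices argument controlling how many partitions the data is
    # split into, e.g. sc.parallelize([1, 2, 3, 4, 5], numSlices=3), and
    # rdd.getNumPartitions() reports the resulting count.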


    def map_func(data):
        """Multiply a single element by 10."""
        return data * 10


    # map applies the given function to every element of the RDD.
    # A named function works:
    # print(rdd.map(map_func).collect())

    # An equivalent lambda is more concise:
    print(rdd.map(lambda data: data * 10).collect())
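
    # Illustrative addition (not in the original script): because each map returns
    # a new RDD, map calls can be chained.
    print(rdd.map(lambda data: data * 10).map(lambda data: data + 5).collect())

    # Release the SparkContext when done
    sc.stop()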
