from pyspark import SparkConf, SparkContext
import os

if __name__ == '__main__':
    os.environ["PYSPARK_PYTHON"] = "D:/Python/Python310/python.exe"
    # Run Spark in local mode, using all available CPU cores
    conf = SparkConf().setMaster("local[*]").setAppName("create rdd")
    sc = SparkContext(conf=conf)

    # Build an RDD from a local Python list
    rdd = sc.parallelize([1, 2, 3, 4, 5])

    # filter keeps only the elements for which the predicate returns True (odd numbers here)
    rdd_filter = rdd.filter(lambda x: x % 2 == 1)
    print(rdd_filter.collect())  # [1, 3, 5]

    sc.stop()