from pyspark import SparkConf, SparkContext
import os

# Point PySpark at the local Python interpreter (path is environment-specific)
os.environ['PYSPARK_PYTHON'] = 'D:/PYTHON/python3.10/python.exe'

conf = SparkConf().setMaster('local[*]').setAppName('my_test_spark')
sc = SparkContext(conf=conf)

rdd = sc.parallelize([1, 2, 3, 4, 5, 6, 7, 8, 9, 10])
# Keep only the odd numbers (filter retains elements where the predicate returns True)
rdd1 = rdd.filter(lambda num: num % 2 != 0)
print(rdd1.collect())
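# Expected output: [1, 3, 5, 7, 9]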

sc.stop()
