from pyspark import SparkConf, SparkContext
import os

# Point PySpark at the local Python interpreter (path is machine-specific)
os.environ['PYSPARK_PYTHON'] = r"D:\Python\Python3107\python.exe"
# Run Spark locally on all available cores
conf = SparkConf().setMaster("local[*]").setAppName("test_spark")
sc = SparkContext(conf=conf)

# Build an RDD from a local Python list
rdd = sc.parallelize([1, 2, 3, 4, 5, 6, 7, 8, 9, 10])

# filter() keeps only the elements for which the predicate returns True
rdd2 = rdd.filter(lambda x: x % 2 == 0)

# collect() gathers the filtered elements back to the driver
print(rdd2.collect())  # [2, 4, 6, 8, 10]
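
# A minimal variant of the same filter, assuming the SparkContext above:
# the predicate is factored into a named function (is_even is a name chosen
# here for illustration), which reads more clearly than a lambda once the
# condition grows beyond a single expression.
def is_even(n):
    """Return True for even integers."""
    return n % 2 == 0

print(rdd.filter(is_even).collect())  # [2, 4, 6, 8, 10]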

# Release the SparkContext's resources
sc.stop()
