from pyspark import SparkConf, SparkContext
import os

# PYSPARK_PYTHON must point at the interpreter executable itself, not the
# Scripts directory; forward slashes avoid backslash-escape issues on Windows.
os.environ['PYSPARK_PYTHON'] = 'D:/Pythonworks/python1/.venv/Scripts/python.exe'

# Run Spark locally, using as many worker threads as there are cores.
conf = SparkConf().setMaster("local[*]").setAppName("my app")
sc = SparkContext(conf=conf)

# Distribute a local Python list as an RDD.
data = [1, 2, 3, 4, 5]
rdd = sc.parallelize(data)
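
# parallelize also accepts an optional numSlices argument; a small sketch of
# setting the partition count explicitly (the default follows local[*]):
rdd2 = sc.parallelize(data, 2)
print(rdd2.getNumPartitions())  # 2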

# collect() is an action: it gathers the whole RDD back to the driver,
# so it is only safe on small datasets like this demo.
print(rdd.collect())                   # [1, 2, 3, 4, 5]
print(rdd.reduce(lambda x, y: x + y))  # 15
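
# reduce applies the function pairwise across partitions, so it should be
# associative and commutative; the same pattern gives the maximum, for example:
print(rdd.reduce(lambda x, y: x if x > y else y))  # 5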

# Transformations are lazy; collect() triggers the actual computation.
print(rdd.map(lambda x: x * 2).collect())            # [2, 4, 6, 8, 10]
print(rdd.filter(lambda x: x % 2 == 0).collect())    # [2, 4]
print(rdd.flatMap(lambda x: range(1, x)).collect())  # [1, 1, 2, 1, 2, 3, 1, 2, 3, 4]
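
# flatMap vs. map with the same function: map keeps one output element per
# input, while flatMap unpacks each returned iterable into individual
# elements. A quick illustration (list() just makes the output readable):
print(rdd.map(lambda x: list(range(1, x))).collect())  # [[], [1], [1, 2], [1, 2, 3], [1, 2, 3, 4]]

# Release the SparkContext when finished.
sc.stop()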