from pyspark import SparkConf, SparkContext
import os
# Point PySpark at the local Python interpreter (Windows path; adjust for your machine)
os.environ['PYSPARK_PYTHON'] = r"D:/Python/Python3107/python.exe"

# Build the Spark configuration and create the SparkContext, the entry point to Spark
conf = SparkConf().setMaster("local[*]").setAppName("test_spark")
sc = SparkContext(conf=conf)

# Distribute a local Python list into an RDD
rdd = sc.parallelize([1, 2, 3, 4, 5, 6, 7, 8, 9, 10])


def func(x):
    """Multiply an element by 10; a named-function equivalent of the first lambda below."""
    return x * 10


# map() applies a function to every element; chained maps run one after the other.
# rdd2 = rdd.map(func)  # equivalent first step using the named function above
rdd2 = rdd.map(lambda x: x * 10).map(lambda x: x + 5)
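
# An equivalent single-map rewrite (a sketch, not part of the original example):
# composing both operations inside one lambda yields the same result, since
# Spark applies chained maps element by element anyway.
rdd2_single = rdd.map(lambda x: x * 10 + 5)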

# collect() triggers execution and returns the transformed elements to the driver
print(rdd2.collect())
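# Expected output: [15, 25, 35, 45, 55, 65, 75, 85, 95, 105]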

# Shut down the SparkContext and release its resources
sc.stop()
