from pyspark import SparkConf, SparkContext
import os
# Tell PySpark which Python interpreter the workers should use
# (often needed on Windows when Python is not on the PATH).
os.environ['PYSPARK_PYTHON'] = "C:/Users/28474/AppData/Local/Programs/Python/Python310/python.exe"

# Run Spark locally, using all available CPU cores
conf = SparkConf().setMaster("local[*]").setAppName("test_spark_context")
sc = SparkContext(conf=conf)

# Prepare an RDD
rdd = sc.parallelize([1,2,3,4,5])
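# Note: parallelize also accepts an explicit partition count if needed,
# e.g. sc.parallelize([1, 2, 3, 4, 5], numSlices=2).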
# Use map to multiply every element by 10, then add 5 to each result.
# Named-function equivalent of the first map:
# def func(data):
#     return data * 10
# rdd2 = rdd.map(func)
rdd2 = rdd.map(lambda x: x * 10).map(lambda x: x + 5)
# collect() gathers the transformed elements back to the driver as a list
print(rdd2.collect())  # [15, 25, 35, 45, 55]
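
# A minimal alternative sketch: the two chained map calls can be fused into
# a single lambda; the result is the same, since map is applied per element.
rdd3 = rdd.map(lambda x: x * 10 + 5)
print(rdd3.collect())  # also [15, 25, 35, 45, 55]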

# Release the SparkContext's resources
sc.stop()