from pyspark import SparkConf, SparkContext
import os

# Point PySpark at the local Python interpreter (Windows path; adjust for your environment)
os.environ['PYSPARK_PYTHON'] = "C:/Python310/python.exe"

# Run Spark locally on all available cores
conf = SparkConf().setMaster("local[*]").setAppName("test_spark")
sc = SparkContext(conf=conf)

# Build an RDD from a local Python list
rdd = sc.parallelize([1, 2, 3, 4, 5])


def func(data):
    # Multiply a single element by 10
    return data * 10


# Use map to apply func to every element, multiplying each value by 10
rdd2 = rdd.map(func)

print(rdd2.collect())  # [10, 20, 30, 40, 50]
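
# Equivalently, map accepts a lambda directly; a minimal sketch of the same
# transformation without the named function:
rdd3 = rdd.map(lambda x: x * 10)
print(rdd3.collect())  # [10, 20, 30, 40, 50]

# Stop the SparkContext to release resources once the job is done
sc.stop()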
