from pyspark import SparkConf, SparkContext


## Optionally pin the Python interpreter used by PySpark worker processes.
## Uncomment and adjust the path if the driver and workers resolve different Pythons.
# import os
# os.environ["PYSPARK_PYTHON"] = "C:/Users/13487/AppData/Local/Microsoft/WindowsApps/python3.12.exe"

# Initialize the Spark configuration: run locally using all available cores.
conf = SparkConf().setAppName("task_spark").setMaster("local[*]")
sc = SparkContext(conf=conf)

try:
    # Distribute a small sample list into an RDD and apply an identity
    # map (demo transformation — output equals input element-wise).
    rdd = sc.parallelize([1, 2, 3, 4, 5])
    rdd2 = rdd.map(lambda x: x)

    # collect() pulls the transformed elements back to the driver.
    print(rdd2.collect())
finally:
    # Always release the SparkContext, even if a job above raises,
    # so the local Spark JVM is shut down cleanly.
    sc.stop()

print("我执行完了")
