"""
数据计算RDD中的成员方法flatMap
"""
from pyspark import SparkConf, SparkContext

# NOTE: on Windows you may need to point PySpark at an explicit Python interpreter:
# import os
# os.environ["PYSPARK_PYTHON"] = "D:/tools/python/python3.10.9/python.exe"  # path to python.exe on Windows

conf = SparkConf().setMaster("local[*]").setAppName("test_spark_app")
sc = SparkContext(conf=conf)

rdd = sc.parallelize(["a b c", "d, e, f", "j, h, k"])

# 需求，将RDD里面的字符一个个拿到
rdd2 = rdd.map(lambda element: element.split(" "))

# 解除嵌套
rdd3 = rdd.flatMap(lambda element: element.split(" "))
# 获取rdd数据
print(rdd2.collect())  # [['a', 'b', 'c'], ['d,', 'e,', 'f'], ['j,', 'h,', 'k']]
print(rdd3.collect())  # ['a', 'b', 'c', 'd,', 'e,', 'f', 'j,', 'h,', 'k']

# 关闭pyspark
sc.stop()
