"""
演示将RDD输出到文件中
"""
import json

from pyspark import SparkConf, SparkContext
import os
# Point PySpark's worker processes at a local CPython interpreter, and give
# Spark a Hadoop install (for winutils) so file output works on Windows.
os.environ['PYSPARK_PYTHON'] = "C:/Users/28474/AppData/Local/Programs/Python/Python310/python.exe"
os.environ['HADOOP_HOME'] = "G:/install/study/hadoop3.0.0"

# Run Spark locally on all available cores.
spark_conf = SparkConf().setMaster("local[*]").setAppName("test_spark")
# Uncomment to force a default parallelism of 1:
# spark_conf.set("spark.default.parallelism", "1")
sc = SparkContext(conf=spark_conf)

# Build three demo RDDs, each with a single partition so every
# saveAsTextFile call below emits exactly one part file.
numbers_rdd = sc.parallelize([1, 2, 3, 4, 5], numSlices=1)
pairs_rdd = sc.parallelize([("Hello", 3), ("Spark", 5), ("Hi", 7)], 1)
nested_rdd = sc.parallelize([[1, 3, 5], [6, 7, 9], [11, 13, 11]], 1)

# Write each RDD to its own output directory as plain text.
# NOTE(review): saveAsTextFile raises if the target directory already exists,
# so a rerun requires deleting these directories first — confirm intended.
numbers_rdd.saveAsTextFile("E:/storage/study/Python/python-learn/day01/02-python-learn/13_pyspark/素材/list")
pairs_rdd.saveAsTextFile("E:/storage/study/Python/python-learn/day01/02-python-learn/13_pyspark/素材/tuple")
nested_rdd.saveAsTextFile("E:/storage/study/Python/python-learn/day01/02-python-learn/13_pyspark/素材/twoList")

sc.stop()