from pyspark import SparkConf, SparkContext

# Spark configuration: run locally using all available cores.
conf = SparkConf().setMaster('local[*]').setAppName('test_pyspark')
sc = SparkContext(conf=conf)

# Two ways to turn data into an RDD:
#   1. sc.parallelize() - from an in-memory Python collection
#   2. sc.textFile()    - from a text file on disk
# (The original used a bare triple-quoted string as a comment; a string
# expression statement is a no-op, so plain comments are the right idiom.)
# List: each element becomes one record of the RDD.
# Renamed from `list` — shadowing the builtin breaks later list() calls.
numbers = [1, 2, 3, 4]
rdd = sc.parallelize(numbers)
print(f'list转为RDD的结果为：{rdd.collect()}')

# Tuple: parallelized the same way as a list, one record per element.
# Renamed from `tuple` — shadowing the builtin breaks later tuple() calls.
values = (1, 2, 3, 4)
rdd = sc.parallelize(values)
print(f'tuple转为RDD的结果为：{rdd.collect()}')

# String: each character becomes one record of the RDD.
# Renamed from `str` — shadowing the builtin breaks later str() calls.
text = "abcdefg"
rdd = sc.parallelize(text)
print(f'str转为RDD的结果为：{rdd.collect()}')


# Set: sets are unordered, so the element order in the RDD is not guaranteed.
# Renamed from `set` — shadowing the builtin breaks later set() calls.
unique_values = {1, 2, 3, 4}
rdd = sc.parallelize(unique_values)
print(f'set转为RDD的结果为：{rdd.collect()}')


# Dict: iterating a dict yields its KEYS, so only the keys end up in the
# RDD; use mapping.items() if key-value pairs are needed.
# Renamed from `dict` — shadowing the builtin breaks later dict() calls.
mapping = {'key1': 1, 'key2': 2, 'key3': 3, 'key4': 4}
rdd = sc.parallelize(mapping)
print(f'dict转为RDD的结果为：{rdd.collect()}')

# Load a text file from disk; each line of the file becomes one RDD record.
file_path = 'D:\\good.txt'
rdd = sc.textFile(file_path)
print(f'文件内容转为RDD的结果为：{rdd.collect()}')

# Shut down the SparkContext and release its resources.
sc.stop()