from pyspark import SparkConf, SparkContext

# Demo script: create a SparkContext, build two RDDs (from a Python list
# and from a text file), print their contents, and shut the context down.

# Configure Spark to run locally using all available cores.
conf = SparkConf().setMaster('local[*]').setAppName('test_spark_app')

sc = SparkContext(conf=conf)

# Ensure the SparkContext is always stopped, even if an action below
# raises (e.g. the text file is missing) — otherwise the JVM-backed
# context would leak.
try:
    # RDD from an in-memory Python collection.
    rdd = sc.parallelize([1, 2, 3, 4, 5])
    print(rdd.collect())

    # RDD from a text file; one element per line. NOTE(review): the path
    # 'hello' is relative to the working directory — confirm it exists.
    rdd2 = sc.textFile('hello')
    print(rdd2.collect())
finally:
    # Stop Spark and release its resources.
    sc.stop()
