from pyspark import SparkConf, SparkContext

# Run Spark locally on all available cores under the app name "test_spark".
conf = SparkConf().setMaster("local[*]").setAppName("test_spark")
sc = SparkContext(conf=conf)

try:
    # Load the file into Spark as an RDD using the textFile method
    # (one element per line). NOTE: path is machine-specific; parameterize
    # it if this script is to run elsewhere.
    rdd = sc.textFile("/Users/zhangguoqiang/PycharmProjects/pythonProject/hello.txt")
    # collect() pulls the whole RDD to the driver — fine for a small demo file.
    print(rdd.collect())
finally:
    # Always release the SparkContext, even if reading/collecting fails;
    # otherwise the local Spark runtime is left running.
    sc.stop()