"""Word-count example: count comma-separated words in a text file with PySpark."""
from pyspark.context import SparkContext

# Create the Spark context (local mode, single process).
sc = SparkContext(master='local', appName='word_count')

try:
    # Read the input file; each element of the RDD is one line of text.
    lines_rdd = sc.textFile('../../data/word.txt')

    # flatMap (not map) so each comma-separated token becomes its own element
    # instead of one list per line.
    words_rdd = lines_rdd.flatMap(lambda line: line.split(','))

    # Pair each word with an initial count of 1.
    kv_rdd = words_rdd.map(lambda word: (word, 1))

    # Sum the counts per word.
    count_rdd = kv_rdd.reduceByKey(lambda a, b: a + b)

    # collect() brings the (small) result to the driver so the output is
    # printed here even in cluster mode; foreach(print) would print on the
    # executors instead.
    for word, count in count_rdd.collect():
        print((word, count))
finally:
    # Always release Spark resources, even if a stage above fails.
    sc.stop()