# from __future__ import print_function
import sys
from operator import add
import os

from pyspark import SparkContext
from pyspark import SparkConf

def main(argv):
    """Run a Spark word count: read a text file, count whitespace-split words,
    and save the (word, count) pairs as a text file.

    argv[1] (optional): input text file path; defaults to "../Harry.txt".
    argv[2] (optional): output directory path; defaults to "../out_data/llll".
                        Spark fails if this directory already exists.
    """
    # Keep the original hard-coded paths as defaults for backward compatibility.
    input_path = argv[1] if len(argv) > 1 else "../Harry.txt"
    output_path = argv[2] if len(argv) > 2 else "../out_data/llll"

    # Use the SparkConf already imported at the top of the file to give the
    # job an identifiable name in the Spark UI.
    conf = SparkConf().setAppName("WordCount")
    sc = SparkContext(conf=conf)
    try:
        counts = (
            sc.textFile(input_path)
            .flatMap(lambda line: line.split(' '))
            .map(lambda word: (word, 1))
            # operator.add (imported at file top) replaces the equivalent
            # hand-rolled lambda a, b: a + b.
            .reduceByKey(add)
        )
        counts.saveAsTextFile(output_path)
    finally:
        # Always release the SparkContext, even if the job fails —
        # the original leaked it.
        sc.stop()


if __name__ == "__main__":
    main(sys.argv)