import os
import shutil

from pyspark import SparkContext, SparkConf
import sys

if __name__ == "__main__":
    # Word-count driver: reads <data_path>/test.txt and writes the per-word
    # counts to <data_path>/wordcount as Spark text-file output.
    conf = SparkConf().setAppName("python_spark_app")

    default_path = "./files"
    is_local = False
    if len(sys.argv) == 1:
        # No CLI argument: run with a local master against the bundled sample data.
        conf.setMaster("local")
        is_local = True
        data_path = default_path
    else:
        # First CLI argument is the data directory (may be a local path or an
        # HDFS-style URI — presumably; TODO confirm against deployment).
        data_path = sys.argv[1]

    print(data_path)

    # Use plain string concatenation rather than os.path.join: data_path may be
    # a URI (e.g. hdfs://...), where os.path semantics do not apply.
    output_path = data_path + "/wordcount"

    # saveAsTextFile fails if the output directory already exists; removing it
    # is only safe locally, where the path refers to the local filesystem.
    if is_local and os.path.exists(output_path):
        shutil.rmtree(output_path)

    sc = SparkContext(conf=conf)
    try:
        source_data_rdd = sc.textFile(data_path + "/test.txt")

        # Tokenize on whitespace, emit (word, 1) pairs, then sum counts per word.
        word_count_rdd = (
            source_data_rdd
            .flatMap(lambda line: line.split())
            .map(lambda word: (word, 1))
            .reduceByKey(lambda a, b: a + b)
        )

        # Cache before running two actions (save + collect) so the lineage is
        # not recomputed from the source file a second time.
        word_count_rdd.cache()
        word_count_rdd.saveAsTextFile(output_path)
        print(word_count_rdd.collect())
    finally:
        # Always release driver/cluster resources, even on failure.
        sc.stop()