from mrjob.job import MRJob
from mrjob.protocol import JSONValueProtocol
from mrjob.step import MRStep
import re

#
# class MRWordFrequencyCount(MRJob):
#
#     def mapper(self, _, line):
#         yield "chars", len(line)
#         yield "words", len(line.split())
#         yield "lines", 1
#
#     def reducer(self, key, values):
#         yield key, sum(values)
#
#
# if __name__ == '__main__':
#     MRWordFrequencyCount.run()
# python testmap.py my_file.txt my_file2.txt - < my_file3.txt

#

#
# WORD_RE = re.compile(r"[\w']+")
#
#
# class MRMostUsedWord(MRJob):
#
#     def steps(self):
#         return [
#             MRStep(mapper=self.mapper_get_words,
#                    combiner=self.combiner_count_words,
#                    reducer=self.reducer_count_words),
#             MRStep(reducer=self.reducer_find_max_word)
#         ]
#
#     def mapper_get_words(self, _, line):
#         # yield each word in the line
#         for word in WORD_RE.findall(line):
#             yield (word.lower(), 1)
#
#     def combiner_count_words(self, word, counts):
#         # optimization: sum the words we've seen so far
#         yield (word, sum(counts))
#
#     def reducer_count_words(self, word, counts):
#         # send all (num_occurrences, word) pairs to the same reducer.
#         # num_occurrences is so we can easily use Python's max() function.
#         yield None, (sum(counts), word)
#
#     # discard the key; it is just None
#     def reducer_find_max_word(self, _, word_count_pairs):
#         # each item of word_count_pairs is (count, word),
#         # so yielding one results in key=counts, value=word
#         yield max(word_count_pairs)

#
# WORD_RE = re.compile(r"[\w']+")
#
#
# class MRWordFreqCount(MRJob):
#
#     def init_get_words(self):
#         self.words = {}
#
#     def get_words(self, _, line):
#         for word in WORD_RE.findall(line):
#             word = word.lower()
#             self.words.setdefault(word, 0)
#             self.words[word] = self.words[word] + 1
#
#     def final_get_words(self):
#         for word, val in self.words.items():
#             yield word, val
#
#     def sum_words(self, word, counts):
#         yield word, sum(counts)
#
#     def steps(self):
#         return [MRStep(mapper_init=self.init_get_words,
#                        mapper=self.get_words,
#                        mapper_final=self.final_get_words,
#                        combiner=self.sum_words,
#                        reducer=self.sum_words
#                        )]

class KittyJob(MRJob):
    """Count how many input lines mention "kitty".

    The map phase is delegated to a shell command (``grep``), so this job
    only works on runners that support cmd steps (e.g. Hadoop streaming),
    not the inline runner. The single reducer tallies the surviving lines
    and emits the bare count as JSON (key is discarded via
    JSONValueProtocol).
    """

    # Emit only the JSON-encoded value, dropping the key entirely.
    OUTPUT_PROTOCOL = JSONValueProtocol

    def mapper_cmd(self):
        # Shell filter used in place of a Python mapper: keep only
        # lines containing the substring "kitty".
        return "grep kitty"

    def reducer(self, key, values):
        # Tally the matching lines without materializing them in memory.
        total = 0
        for _ in values:
            total += 1
        yield None, total


# Script entry point: hand control to mrjob's command-line driver,
# which parses runner options and executes the job's steps.
if __name__ == "__main__":
    KittyJob.run()

