#!/usr/bin/python

import nltk
import sys
import os
import pickle
import operator

class TfIaf(object):
  """Builds per-author TF-IAF (term frequency / inverse author frequency)
  profiles over character-path-organized book files.

  Book paths are expected to look like '.../author_name/book_file', one per
  line (newline-terminated, as produced by file.readlines()).
  """

  def __init__(self, book_list, n):
    """Args:
      book_list: list of newline-terminated book file paths.
      n: n-gram order used for all frequency computations.
    """
    self.book_list = book_list
    self.n = n
    self.stemmer = nltk.PorterStemmer()
    self.author_book_list_map = self._CreateAuthorBookListMap()

  def _CreateAuthorBookListMap(self):
    """Groups book paths by author.

    The author is taken to be the second-to-last path component
    ('.../author/book'). Returns {author: [book_path, ...]}.
    """
    author_book_list_map = {}
    for book in self.book_list:
      author = book.split('/')[-2]
      # setdefault avoids the separate membership check + insert.
      author_book_list_map.setdefault(author, []).append(book)
    return author_book_list_map

  def MakeNGrams(self, book_name):
    """Returns the list of stemmed, lowercased n-grams for one book file.

    Materialized as a list (nltk.ngrams returns a lazy generator in modern
    NLTK) so callers can take len() and iterate more than once.
    """
    with open(book_name, 'r') as book_file:
      content = book_file.read()
    tokens = nltk.word_tokenize(content)
    stemmed_tokens = [self.stemmer.stem(token).lower() for token in tokens]
    return list(nltk.ngrams(stemmed_tokens, self.n))

  def MakeNGramsFreqForAuthor(self, author):
    """Returns a FreqDist of the author's top-5000 n-grams.

    Values are frequencies per million n-grams observed for that author.
    """
    print('Author:', author)
    ngrams = []
    for book in self.author_book_list_map[author]:
      book_name = book[:-1]  # strip the trailing newline from readlines()
      print('\t', book_name)
      ngrams.extend(self.MakeNGrams(book_name))
    num_ngrams = len(ngrams)
    freq_dist = nltk.FreqDist(ngrams)
    top_freq_dist = nltk.FreqDist()
    # most_common guarantees the *most frequent* 5000 are kept; plain
    # iteration order of a modern FreqDist (a Counter) is insertion order,
    # not frequency order.
    for ngram, count in freq_dist.most_common(5000):
      top_freq_dist[ngram] = float(count * 1000000) / num_ngrams
    return top_freq_dist

  def Iaf(self, ngram, author_ngrams_map):
    """Inverse author frequency for one n-gram.

    Returns (#authors) / (#authors whose top-ngram profile contains ngram).
    The ngram always comes from at least one profile, so the divisor is >= 1.
    """
    count = 0
    authors = 0.0
    for ngrams in author_ngrams_map.values():
      authors += 1
      if ngram in ngrams:
        count += 1
    return authors / count

  def GenerateTfIaf(self):
    """Computes the final TF-IAF profiles.

    Returns {author: [[ngram, score], ...]} with each author's top-50
    n-grams; scores are squashed onto a small integer scale relative to the
    global median score.
    """
    author_ngrams_map = {}
    for author in self.author_book_list_map:
      author_ngrams_map[author] = self.MakeNGramsFreqForAuthor(author)

    print('Calculating TF-IAF:')
    for author, ngram_freq in author_ngrams_map.items():
      print('Calculating TF-IAF for author:', author)
      # Snapshot the keys: we rebind values while iterating.
      for ngram in list(ngram_freq):
        ngram_freq[ngram] = ngram_freq[ngram] * self.Iaf(ngram, author_ngrams_map)

    tf_iaf = {}
    all_tf_iafs = []
    for author, ngrams in author_ngrams_map.items():
      # Keep the 50 highest-scoring n-grams; entries are mutable [ngram,
      # score] pairs so the squashing pass below can rewrite scores in place.
      tf_iaf[author] = sorted([list(i) for i in ngrams.items()],
                              key=operator.itemgetter(1),
                              reverse=True)[0:50]
      all_tf_iafs.extend(score for _, score in tf_iaf[author])
    # // keeps the median index an int (Python 2's '/' truncated here).
    median = sorted(all_tf_iafs)[len(all_tf_iafs) // 2]

    for ngrams in tf_iaf.values():
      for entry in ngrams:
        score = entry[1]
        # Squash: maps scores into [0, 10) relative to the median.
        entry[1] = int(float(score * 10) / (score + median))
    return tf_iaf

def main():
  """Reads a newline-terminated list of book paths from argv[1], computes
  trigram TF-IAF profiles, and pickles the result to 'dump.p'."""
  with open(sys.argv[1], 'r') as book_list_file:
    book_list = book_list_file.readlines()
  tf_iaf = TfIaf(book_list, 3)
  tfiaf = tf_iaf.GenerateTfIaf()
  # pickle requires a binary-mode file on Python 3 ('w' breaks the dump).
  with open('dump.p', 'wb') as dump_file:
    pickle.dump(tfiaf, dump_file)


if __name__ == '__main__':
  main()

