import feedparser
import pickle
import re
import os

def wordcount(feed_url):
    '''Count every word appearing in the entries of a feed.

    feed_url -> A string with the url of the feed to parse.
    Returns a dictionary mapping each lowercase word to its number of
    occurrences, e.g. {'word': 3, 'word2': 1}.
    '''
    counts = {}
    parsed = feedparser.parse(feed_url)
    for entry in parsed['entries']:
        # Prefer 'summary'; fall back to 'description' when it is
        # missing or empty (feeds differ in which key they populate)
        html = entry.get('summary', '') or entry.get('description', '')
        for word in words_from_html(html):
            counts[word] = counts.get(word, 0) + 1
    return counts
    
def words_from_html(html):
    '''Returns a list of words (strings) in lowercase given some (x)html text'''
    # Remove tags
    text = re.compile(r'<[^>]+>').sub('', html)
    words = re.compile(r'[^A-Z^a-z]+').split(text)
    return [word.lower() for word in words if word != '']


def get_wordcount_info(feeds_fname, stored_info_fname='data/saved.txt', force_update=False):
    '''Return a dictionary containing the word count for each feed.
    The dictionary is in the form {'feed_url': {'word1: numeric_count,
                                                'word2: numeric_count }
                                   'feed_url2':{'word1: numeric_count,
                                                'word2: numeric_count }}.                               
    feeds_fname -> A string with the filename or path that contains a newline
    separated list of feed urls.
    stored_info_fname -> A string with the filename or path that contains the stored
    information, if it doesnt exist or it has no info then the information will be 
    saved there.
    force_update -> Flag to indicate if we should update the info even if it exists
    '''
    
    # If the file with stored info doesn't exist, just create it
    if not os.path.exists(stored_info_fname):
        open(stored_info_fname, 'w').close()
    # Open the file with the feeds, raise exception if it doesn't exist 
    feeds_file = open(feeds_fname)
    # Open the file with the previously stored info
    stored_info_file = open(stored_info_fname)
    # Try loading the previously stored info, it there isn't any then
    # just create and empty dict
    try: wordcount_info = pickle.load(stored_info_file)
    except EOFError: wordcount_info = {}
    stored_info_file.close()
    # Return the old info if we don't have to update and there was stored info
    if not force_update and wordcount_info:
        return wordcount_info
    # Else read/update all the feeds and save the wordcount. This may take
    # several minutes.
    feed_urls = [url.strip() for url in feeds_file.readlines()] # Load in memory
    
    try: 
        for i, feed_url in enumerate(feed_urls):
            print "Reading blog %i of %i" % (i+1, len(feed_urls))
            wcount = wordcount(feed_url)
            if wcount: # Don't save empty stuff
                wordcount_info[feed_url] = wcount
    except KeyboardInterrupt:
        pass # Just continue and save the info that was read 
    
    # Reopen to write
    stored_info_file = open(stored_info_fname, 'w')
    # Save the wordcount info
    pickle.dump(wordcount_info, stored_info_file)
    # Close the files
    stored_info_file.close()
    feeds_file.close()
    # Return the wordcount dict
    return wordcount_info