#!/usr/bin/env python

import sys
import time
import urllib2
import getpass
import xml.sax
import optparse
import pprint
import pickle
import re
import StringIO
import logging
import logging.config
import codecs

# Set up the logger
logging.config.fileConfig('suck.config')
logger = logging.getLogger('suck')

SAVE_FILE = 'pickled.txt'

class BloglinesItemsFinder(xml.sax.handler.ContentHandler):
    """SAX handler that collects every <outline> element from a Bloglines
    OPML subscription list.

    Each collected item is a dict of the outline's XML attributes plus a
    'fullName' key: the list of ancestor outline titles ending with the
    item's own title, so folder nesting is preserved.
    """

    def __init__(self):
        # Initialize the base handler (sets up its locator slot), which the
        # original omitted.
        xml.sax.handler.ContentHandler.__init__(self)
        self.items = []    # one dict per <outline> seen, in document order
        self.prefix = []   # titles of the currently-open <outline> ancestors

    def addItem(self, name, attrs):
        """Record one <outline> element's attributes plus its full path."""
        m = {}
        for key in attrs.keys():
            m[key] = attrs[key]
        # Copy the current folder path and append this item's own title.
        m['fullName'] = self.prefix + [m['title']]
        self.items.append(m)

    def startElement(self, name, attrs):
        if name == 'outline':
            self.addItem(name, attrs)
            # Descend: this outline's title becomes part of its children's path.
            self.prefix.append(attrs['title'])

    def endElement(self, name):
        if name == 'outline':
            # Ascend: drop this outline's title from the path.  The parser
            # guarantees start/end pairing, so the list is never empty here.
            self.prefix.pop()


class BloglinesItemsReader(xml.sax.handler.ContentHandler):
    """SAX handler that extracts <item> entries from a Bloglines RSS feed.

    <title>/<link> elements seen outside any <item> populate the feed-level
    metadata; each <item>'s child elements become keys of a per-item dict,
    which is seeded with 'feedTitle' and 'feedLink' from that metadata.
    """

    def __init__(self):
        # Initialize the base handler, which the original omitted.
        xml.sax.handler.ContentHandler.__init__(self)
        self.items = []            # completed item dicts, in document order
        self.title = ''            # feed-level <title> text
        self.link = ''             # feed-level <link> text
        self.thisItem = None       # dict for the <item> being built, if any
        self.thisAttribute = None  # name of the item child element last opened
        self.chars = ''            # text accumulated for the current element

    def startElement(self, name, attrs):
        self.chars = ''
        if name == 'item':
            # Start a new item, seeded with the feed-level metadata.
            self.thisItem = {'feedTitle': self.title, 'feedLink': self.link}
        elif self.thisItem is not None:
            # A child of <item>: remember which attribute we are filling.
            self.thisAttribute = name

    def characters(self, chars):
        # SAX may deliver an element's text in several chunks; accumulate.
        self.chars += chars

    def endElement(self, name):
        if name == 'item':
            self.items.append(self.thisItem)
            self.thisItem = None
            self.thisAttribute = None
        elif name == self.thisAttribute:
            # Closing an item child: store its accumulated text on the item.
            self.thisItem[name] = self.chars
        elif self.thisItem is None:
            # Outside any <item>: capture feed-level title/link.
            if name == 'title':
                self.title = self.chars
            elif name == 'link':
                self.link = self.chars
        self.chars = ''

def sortItems(items):
    """Return a new list of items sorted newest-first by 'pubDate'.

    Sorting with an explicit key avoids the original (time, item) tuple
    sort, which fell back to comparing the item dicts themselves whenever
    two items shared a timestamp (arbitrary ordering in Python 2, a
    TypeError in Python 3).  Raises ValueError if a pubDate does not match
    the RFC-822 style format used by RSS feeds.
    """
    def getTime(item):
        # e.g. 'Mon, 01 Jan 2007 12:00:00 GMT'
        return time.strptime(item['pubDate'], '%a, %d %b %Y %H:%M:%S %Z')

    return sorted(items, key=getTime, reverse=True)
        

def printItems(items, outputFile):
    """Render items through the XHTML templates to outputFile (or stdout).

    Reads template_head.xhtml, template_entry.xhtml and template_foot.xhtml
    from the current directory; each item dict is %-interpolated into the
    entry template after ad-hoc fix-ups for the HTML Bloglines emits.
    Mutates the item dicts in place (escaped fields, default 'link').
    """
    # Escape a bare '&' that isn't already starting an entity reference
    # (i.e. not followed by a lowercase letter or '#').
    escapeAmpRe = re.compile(r'\&($|[^a-z#])')

    if outputFile is not None:
        out = codecs.open(outputFile, 'wb', 'utf-8')
    else:
        out = sys.stdout

    out.write(codecs.open('template_head.xhtml', 'rb', 'utf-8').read())
    entryTemplate = codecs.open('template_entry.xhtml', 'rb', 'utf-8').read()
    for item in items:
        logger.debug('about to format ' + pprint.pformat(item))
        # Quote the bare attribute values Bloglines emits and close <br>
        # tags so the output stays well-formed XHTML.
        item['description'] = item['description'].replace('=_blank', '="_blank"')
        item['description'] = item['description'].replace('=blines3', '="blines3"')
        item['description'] = item['description'].replace('=blines2', '="blines2"')
        item['description'] = item['description'].replace('<br>', '<br/>')
        item['description'] = escapeAmpRe.sub(r'&amp;\1', item['description'])

        # 'has_key' is deprecated; a single if/else replaces the original's
        # separate has_key / not-has_key checks.
        if 'link' in item:
            item['link'] = xml.sax.saxutils.escape(item['link'])
        else:
            item['link'] = ''

        item['title'] = escapeAmpRe.sub(r'&amp;\1', item['title'])
        item['title'] = item['title'].replace('"', '&quot;') # All Consuming

        out.write(entryTemplate % item)
    out.write(codecs.open('template_foot.xhtml', 'rb', 'utf-8').read())

def sanitizeItems(items):
    """Ensure every item has a 'dc:subject' key (defaulting to '') so that
    %-interpolation into the entry template never raises KeyError.
    Mutates the dicts in place."""
    for item in items:
        # setdefault only inserts when the key is missing.
        item.setdefault('dc:subject', '')

def main(args=None):
    """Fetch (or reload) Bloglines feed items and render them as XHTML.

    --read loads previously pickled feed data from SAVE_FILE instead of
    contacting Bloglines; --save pickles whatever was gathered for reuse;
    --item names the subscription folder whose feeds are collected.
    Returns a process exit code (0 on success, 1 if --item is not found).
    """
    global options
    if args == None:
        args = sys.argv[1:]

    optParser = optparse.OptionParser()
    optParser.add_option('--item', action='store', type='string', dest='item', default='Me')
    optParser.add_option('--verbose', action='store_true', dest='verbose')
    optParser.add_option('--output', action='store', dest='outputFile', type='string', default=None)
    optParser.add_option('--read', action='store_true', dest='read', help='read feeds from file instead of hitting Bloglines')
    optParser.add_option('--save', action='store_true', dest='save', help='save feeds to a file for future formatting')
    
    (options, args) = optParser.parse_args(args)

    if options.verbose:
        # Mirror INFO-and-above log records to the console.
        console = logging.StreamHandler()
        console.setLevel(logging.INFO)
        logger.addHandler(console)

    authRealm = 'Bloglines RPC'
    domain = 'rpc.bloglines.com'
    #username = raw_input("User Name: ")
    # NOTE(review): username is hard-coded; consider restoring the prompt
    # above or taking it as an option.
    username = 'blair_conrad@alumni.uwaterloo.ca'

    # allData accumulates (fullName, raw-feed-bytes) pairs.
    allData = []
    if options.read:
        # Reuse feed data saved by a previous --save run; no network access.
        allData = pickle.load(file(SAVE_FILE, 'rb'))
    else:
        password = getpass.getpass("Password: ")

        # Build a Basic-Auth opener for the Bloglines RPC realm...
        auth_handler = urllib2.HTTPBasicAuthHandler()
        auth_handler.add_password(authRealm, domain, username, password)
        opener = urllib2.build_opener(auth_handler)
        # ...and install it globally so it can be used with urlopen.
        urllib2.install_opener(opener)

        # Parse the OPML subscription list into flat items with fullName paths.
        parser = xml.sax.make_parser()
        f = BloglinesItemsFinder()
        parser.setContentHandler(f)
        parser.parse(urllib2.urlopen('http://rpc.bloglines.com/listsubs'))

        # Find the subscription folder whose title matches --item.
        item = None
        for thing in f.items:
            if thing['fullName'][-1] == options.item:
                item = thing
                break

        if item is None:
            print "Can't find item '" + options.item + "' in feeds"
            return 1

        # Collect every subscription nested under the matched folder: a
        # child's fullName is strictly longer and starts with the folder's
        # fullName as a prefix.
        children = []
        for thing in f.items:
            if len(item['fullName']) < len(thing['fullName']):
                child = True
                for i in range(len(item['fullName'])):
                    if item['fullName'][i] != thing['fullName'][i]:
                        child = False
                        break
                if child:
                    children.append(thing)

        logger.debug('item ' + options.item + ' = ' + pprint.pformat(item))

        allItems = []
        for child in children:
            # n=0 keeps items unread; d limits to items from the last 10 days.
            itemUrl = 'http://rpc.bloglines.com/getitems?s=%s&n=0&d=%d' % (child['BloglinesSubId'], (time.time()-10*86400))
            logger.info('fetching items for' + str(child['fullName']) + ' url = ' + itemUrl)
            try:
                data = urllib2.urlopen(itemUrl).read()
                allData.append((child['fullName'], data))
                
            # should have exception handling deal with the HTTP 304 (no change)s
            except Exception, e:
                # Best-effort: log and keep going so one bad feed doesn't
                # abort the whole run.
                logger.error('error when trying to read ' + str(child['fullName']) + ' - ' + str(e))
                #raise

    # done gathering allData
    if options.save:
            pickle.dump(allData, file(SAVE_FILE, 'wb'))

    # Parse each feed's raw XML into item dicts.
    allItems = []
    parser = xml.sax.make_parser()
    for name, data in allData:
        r = BloglinesItemsReader()
        parser.setContentHandler(r)
        parser.parse(StringIO.StringIO(data))
        allItems.extend(r.items)
    
    # Strip surrounding whitespace from every field before templating.
    for item in allItems:
        for key in item:
            item[key] = item[key].strip()

    sanitizeItems(allItems)
    logger.debug('allItems = ' + pprint.pformat(allItems))

    printItems(sortItems(allItems), options.outputFile)
    return 0


if __name__ == '__main__':
    sys.exit(main())
