from BeautifulSoup import BeautifulSoup
from urllib2 import urlopen
from urlparse import urljoin
import feedparser

def find_feed_from_sites(urls, must_have_date_info=True):
    """
    Return a ``(feed_url, site_url)`` tuple for the first site in ``urls``
    that advertises a usable feed, or ``None`` when no site yields one
    (including when ``urls`` is empty).

    ``urls`` is tried in order, so earlier entries take priority.  When
    ``must_have_date_info`` is True, feeds whose entries lack parsed date
    information are skipped (see ``find_feed`` / ``has_date_info``).

    >>> urls = ['http://eflorenzano.com', 'http://pownce.com/ericflo', 'http://twitter.com/ericflo']
    >>> find_feed_from_sites(urls)
    (u'http://eflorenzano.com/blog/feeds/all/', 'http://eflorenzano.com')
    >>> urls = ['http://pownce.com/ericflo', 'http://twitter.com/ericflo', 'http://eflorenzano.com']
    >>> find_feed_from_sites(urls)
    (u'http://pownce.com/feeds/public/ericflo.atom', 'http://pownce.com/ericflo')
    >>> find_feed_from_sites([])
    >>>
    """
    for url in urls:
        # Bug fix: forward the caller's flag instead of hard-coding True,
        # so must_have_date_info=False actually takes effect.
        feed = find_feed(url, must_have_date_info=must_have_date_info)
        if feed is not None:
            return (feed, url)
    return None

def find_feed(url, must_have_date_info=True):
    """
    Discover the feed URL advertised by the page at ``url``.

    Fetches the page, then looks for ``<link rel="alternate">`` tags,
    preferring Atom (``application/atom+xml``) and falling back to RSS
    (``application/rss+xml``).  Candidate hrefs are resolved against
    ``url`` so relative links come back absolute.  When
    ``must_have_date_info`` is True, candidates whose entries lack parsed
    date information are skipped.  Returns the first acceptable feed URL,
    or None when none is found.

    >>> find_feed('http://www.eflorenzano.com')
    u'http://www.eflorenzano.com/blog/feeds/all/'
    >>> find_feed('http://pownce.com/thauber/')
    u'http://pownce.com/feeds/public/thauber.atom'
    """
    page = BeautifulSoup(urlopen(url).read())
    candidates = page.findAll('link', rel="alternate", type="application/atom+xml")
    if not candidates:
        # No Atom links advertised; fall back to RSS.
        candidates = page.findAll('link', rel="alternate", type="application/rss+xml")
    for link in candidates:
        feed_url = urljoin(url, link.attrMap['href'])
        if not must_have_date_info or has_date_info(feed_url):
            return feed_url
    return None

def has_date_info(url):
    """
    Report whether the feed at ``url`` carries per-entry date information.

    The feed is parsed with feedparser and only the *first* entry is
    inspected: returns True when that entry exposes an ``updated_parsed``
    attribute, False when it does not or when the feed has no entries.

    >>> has_date_info('http://www.eflorenzano.com/blog/feeds/all/')
    True
    >>> has_date_info('http://www.thauber.com/blog/feeds/all/')
    False
    """
    parsed = feedparser.parse(url)
    if not parsed.entries:
        return False
    # Only the first entry decides, matching the original short-circuit.
    return hasattr(parsed.entries[0], 'updated_parsed')

if __name__ == "__main__":
    import doctest
    (failure_count, test_count) = doctest.testmod()
    if failure_count == 0:
        print "All tests pass!"
