import os
import sys
import time
import socket
import optparse
import datetime
import feedparser
from aggregator.models import Feed, FeedItem
from BeautifulSoup import BeautifulSoup
from django.core.management.base import NoArgsCommand

LOCKFILE = "/tmp/update_feeds.lock"
VERBOSE = True

class Command(NoArgsCommand):

    help = 'Poll the registered feeds and store any new feed items.'

    def handle_noargs(self, **options):
        """Entry point: take an exclusive lock so only one updater runs
        at a time, then update all feeds and exit with the result."""
        # Don't let one hung remote server stall the whole run.
        socket.setdefaulttimeout(15)
        try:
            # O_CREAT | O_EXCL fails if the file already exists, which
            # makes lock acquisition atomic across processes.
            lockfile = os.open(LOCKFILE, os.O_CREAT | os.O_EXCL)
        except OSError:
            # Another updater instance holds the lock; bail out quietly.
            sys.exit(0)
        try:
            sys.exit(self.update_feeds())
        finally:
            # Always release the lock, even if update_feeds() raised.
            os.close(lockfile)
            os.unlink(LOCKFILE)

    def update_feeds(self):
        """Fetch every non-defunct feed and create a FeedItem for each
        entry not already stored (entries are matched by GUID)."""
        for feed in Feed.objects.filter(is_defunct=False):
            if VERBOSE:
                print(feed)
            parsed_feed = feedparser.parse(feed.feed_url)
            for entry in parsed_feed.entries:
                encoding = parsed_feed.encoding
                title = entry.title.encode(encoding, "xmlcharrefreplace")
                guid = entry.get("id", entry.link).encode(encoding, "xmlcharrefreplace")
                link = entry.link.encode(encoding, "xmlcharrefreplace")

                # An entry with an empty "id" element still needs a
                # usable GUID; fall back to the link.
                if not guid:
                    guid = link

                content = self._get_content(entry).encode(
                        encoding, "xmlcharrefreplace")
                date_modified = self._get_date_modified(entry, parsed_feed)

                try:
                    feed.feeditem_set.get(guid=guid)
                except FeedItem.DoesNotExist:
                    # Google News titles look like "Headline - Publication";
                    # split them apart.  partition() leaves pub == '' when
                    # the separator is absent.
                    title, sep, pub = title.partition(' - ')
                    summary = self._scrape_summary(content)

                    feed.feeditem_set.create(
                            title=title.strip(),
                            publication=pub.strip(),
                            link=link,
                            summary=summary,
                            guid=guid,
                            date_modified=date_modified)

            # Trim old items so the table does not grow without bound.
            # count() avoids loading every row just to measure the set.
            if feed.feeditem_set.count() > 30:
                for item in feed.feeditem_set.all()[15:]:
                    item.delete()

    def _get_content(self, entry):
        """Return the best available body text for a feed entry,
        preferring summary, then content, then description."""
        if hasattr(entry, "summary"):
            return entry.summary
        if hasattr(entry, "content"):
            return entry.content[0].value
        if hasattr(entry, "description"):
            return entry.description
        return u""

    def _get_date_modified(self, entry, parsed_feed):
        """Return the entry's modification time as a datetime,
        preferring the entry's own timestamp, then the feed's,
        falling back to now() when none parses."""
        try:
            if 'modified_parsed' in entry:
                return datetime.datetime.fromtimestamp(
                        time.mktime(entry.modified_parsed))
            if 'modified_parsed' in parsed_feed.feed:
                return datetime.datetime.fromtimestamp(
                        time.mktime(parsed_feed.feed.modified_parsed))
            if 'modified' in parsed_feed:
                # parsed_feed.modified may be an unparsed string, in
                # which case mktime() raises TypeError below.
                return datetime.datetime.fromtimestamp(
                        time.mktime(parsed_feed.modified))
        except TypeError:
            # Malformed date structure in the feed; use the fallback.
            pass
        return datetime.datetime.now()

    def _scrape_summary(self, content):
        """Extract the summary text from a Google News entry body.

        Google wraps the snippet in <div class="lh"> with the text in
        the second <font size="-1"> element.  If that structure is
        missing, return the raw content instead of crashing the whole
        update run with an AttributeError/IndexError.
        """
        try:
            parts = BeautifulSoup(content).find(
                    'div', {'class': 'lh'}).findAll(
                            'font', {'size': '-1'})[1].contents
        except (AttributeError, IndexError):
            return content
        return u''.join(unicode(part) for part in parts)
