# -*- coding: utf-8 -*-

import logging
import math
import random
import re

from datetime import datetime, timedelta

from google.appengine.ext import webapp
from google.appengine.ext import db

from google.appengine.api import urlfetch
from google.appengine.api.urlfetch_errors import DownloadError
from google.appengine.api import taskqueue
from google.appengine.api import images
from BeautifulSoup import BeautifulSoup, Comment


from utilities import *
import feedparser
from config import Config


class FeedManager(webapp.RequestHandler):
    """Polls every configured feed that is due for retrieval.

    Triggered via HTTP GET (typically from cron). For each feed whose
    last retrieval is older than Config.feed_retrieval_delay_minutes(),
    the feed is fetched, parsed with feedparser, and a Domain entity is
    created (if missing) for the host of every entry link.
    """

    def get(self):
        """Fetch all due feeds and process their entries."""
        # Feeds retrieved after this deadline are considered fresh and skipped.
        feed_retrieval_deadline = datetime.now() - timedelta(
            minutes=Config.feed_retrieval_delay_minutes())
        logging.debug('feed_retrieval_deadline %s.' % feed_retrieval_deadline)

        for feed in Config.get_feeds():
            if feed.last_retrieved > feed_retrieval_deadline:
                logging.debug('Skipping feed %s.' % feed.url)
                continue
            logging.debug('Getting feed %s.' % feed.url)
            try:
                result = urlfetch.fetch(feed.url)
            except DownloadError, error:
                # Format the exception itself: DownloadError.message is
                # often empty, which produced useless log lines.
                logging.warning('Could not get feed %s - %s' % (feed.url, error))
                continue
            if result.status_code == 200:
                # Record the successful fetch *before* parsing so a parser
                # crash does not make us hammer the feed on every retry.
                feed.last_retrieved = datetime.now()
                feed.put()
                self.__parse_feed(result.content)
            elif result.status_code == 500:
                logging.error('Feed %s returned with status code 500.' % feed.url)
            elif result.status_code == 404:
                logging.error('Error 404: Nothing found at %s.' % feed.url)
            else:
                # BUGFIX: other status codes (301, 403, 503, ...) were
                # silently ignored before; at least leave a trace.
                logging.error('Feed %s returned unexpected status code %s.'
                              % (feed.url, result.status_code))

    def __parse_feed(self, feed_content):
        """Parse raw feed XML/HTML and prepare an article per entry."""
        feed = feedparser.parse(feed_content)
        for entry in feed.entries:
            self.__prepare_article(entry)
            logging.debug('Prepare article %s.' % entry.link)

    def __prepare_article(self, entry):
        """Extract link and publication date from a feed entry, then store it."""
        url = entry.link

        # Deprecated dict.has_key replaced with the `in` operator;
        # feedparser entries support containment checks.
        if 'updated_parsed' in entry:
            date = datetime(*entry.updated_parsed[:6])
        else:
            # Feeds without a timestamp are treated as published "now".
            date = datetime.now()

        self.__store_article(url, date)

    def __store_article(self, url, date):
        """Ensure a Domain entity exists for url's host and return it.

        Despite the method name, only the *domain* of the article is
        persisted. `date` becomes pubDate of a newly created Domain and
        is ignored for domains already stored. Returns None when no
        hostname can be extracted from `url`.
        """
        entry_domain = urlparse(url).hostname

        # BUGFIX: relative or malformed links yield hostname None, which
        # previously crashed on .startswith below.
        if not entry_domain:
            logging.warning('Could not extract a hostname from %s.' % url)
            return None

        # Normalize "www.example.com" and "example.com" to one entity.
        if entry_domain.startswith('www.'):
            entry_domain = entry_domain[4:]

        query = Domain.all()
        query.filter('domain =', entry_domain)
        domains = query.fetch(1)

        if domains:
            a_domain = domains[0]
        else:
            a_domain = Domain(domain=entry_domain, pubDate=date)
            a_domain.put()
            # BUGFIX: the old message claimed an "article" was added,
            # but this code creates a Domain entity.
            logging.info('Adding new domain "%s"' % entry_domain)

        return a_domain
        