# -*- coding: utf-8 -*-
#
# Copyright © 2010 Benedikt Eger
#
# This file is part of top-news-buzz.
#
# top-news-buzz is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# top-news-buzz is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with top-news-buzz.  If not, see <http://www.gnu.org/licenses/>.

import logging
import math
import random

from datetime import datetime, timedelta

from google.appengine.ext import webapp
from google.appengine.ext import db

from google.appengine.api import urlfetch
from google.appengine.api.urlfetch_errors import DownloadError

from topnewsbuzz.news import NewsArticle
from topnewsbuzz.config import Config
from topnewsbuzz.utils import HTMLStripper
from topnewsbuzz.lib import feedparser

class Feed(db.Model):

    # Datastore model for one subscribed RSS/Atom feed.
    #
    # url: location of the feed document.
    # boost: score multiplier applied to articles coming from this feed
    #     (passed through to NewsArticle.score_boost by FeedManager).
    # last_retrieved: time of the last successful fetch; defaults to the
    #     Unix epoch so a freshly added feed is always considered due.
    #     NOTE(review): datetime.fromtimestamp(0) is evaluated once at
    #     import time and is local-timezone dependent -- presumably
    #     harmless, as it only needs to predate any real retrieval.
    url = db.StringProperty(required = True)
    boost = db.FloatProperty(default = 1.0)
    last_retrieved = db.DateTimeProperty(default = datetime.fromtimestamp(0))


class FeedManager(webapp.RequestHandler):
    """Request handler that polls all configured feeds and stores new articles."""

    def get(self):
        """Fetch every configured feed that is due for retrieval.

        A feed is skipped when it was retrieved less than
        Config.feed_retrieval_delay_minutes() ago.  A successful fetch
        (HTTP 200) updates the feed's last_retrieved timestamp and hands
        the payload to the parser; errors are logged and the feed is
        skipped until the next run.
        """
        feed_retrieval_deadline = datetime.now() - timedelta(minutes = Config.feed_retrieval_delay_minutes())
        for feed in Config.get_feeds():
            if feed.last_retrieved > feed_retrieval_deadline:
                logging.debug('Skipping feed %s.' % feed.url)
                continue
            logging.debug('Getting feed %s.' % feed.url)
            try:
                result = urlfetch.fetch(feed.url)
            # "except DownloadError, error" and error.message are gone in
            # Python 3; "as error" + %s-formatting work on 2.6+ as well.
            except DownloadError as error:
                logging.warning('Could not get feed %s - %s' % (feed.url, error))
                continue
            if result.status_code == 200:
                feed.last_retrieved = datetime.now()
                feed.put()
                self.__parse_feed(result.content, feed.boost)
            elif result.status_code == 500:
                logging.error('Feed %s returned with status code 500.' % feed.url)
            elif result.status_code == 404:
                logging.error('Error 404: Nothing found at %s.' % feed.url)
            else:
                # Previously any other status (301, 403, 503, ...) was
                # dropped silently; log it so broken feeds are visible.
                logging.error('Feed %s returned unexpected status code %d.' % (feed.url, result.status_code))

    def __parse_feed(self, feed_content, boost):
        """Parse raw feed XML and prepare every entry as an article."""
        feed = feedparser.parse(feed_content)
        for entry in feed.entries:
            self.__prepare_article(entry, boost)

    def __prepare_article(self, entry, boost):
        """Extract title, url, content and publish date from one feed entry
        and store it transactionally as a NewsArticle.
        """
        title = entry.title
        # Prefer the original link when the feed is proxied through FeedBurner.
        # (has_key() is deprecated; "in" works on feedparser entries too.)
        if 'feedburner_origlink' in entry:
            url = entry.feedburner_origlink
        else:
            url = entry.link
        # Atom entries carry a list of content elements; RSS uses description.
        if 'content' in entry:
            content = entry.content[0].value
        else:
            content = entry.description
        if Config.strip_html():
            # HTMLStripper accumulates fed data, hence a fresh instance
            # per field.
            stripper = HTMLStripper()
            stripper.feed(title)
            title = stripper.get_data()
            stripper = HTMLStripper()
            stripper.feed(content)
            content = stripper.get_data()
        if 'updated_parsed' in entry:
            date_published = datetime(*entry.updated_parsed[:6])
        else:
            # Entries without a parsable date fall back to "now".
            date_published = datetime.now()
        year_published = date_published.year
        month_published = date_published.month
        day_published = date_published.day
        # isocalendar() returns (ISO year, ISO week, ISO weekday).
        isoyear_published, isoweek_published = date_published.isocalendar()[:2]

        db.run_in_transaction(self.__store_article, title, url, self.__generate_id(url), content, date_published, year_published, month_published, day_published, isoweek_published, isoyear_published, boost)

    def __store_article(self, title, url, article_id, content, date_published, year_published, month_published, day_published, isoweek_published, isoyear_published, score_boost):
        """Create a NewsArticle keyed by url unless one already exists.

        Runs inside a datastore transaction (see __prepare_article).
        Returns the existing or newly created NewsArticle entity.
        """
        news = NewsArticle.get_by_key_name(url)
        if news is None:
            news = NewsArticle(key_name = url, title = title, url = url, id = article_id, content = content, publish_date = date_published, publish_year = year_published, publish_month = month_published, publish_day = day_published, publish_isoweek = isoweek_published, publish_isoyear = isoyear_published, score_boost = score_boost)
            news.put()
            logging.info('Adding new article "%s"' % news.title)
        return news

    def __generate_id(self, url):
        """Return a random nine-digit id not used by any stored NewsArticle.

        Starts from a random candidate and probes successive integers
        until a free one is found.  The local was renamed from 'id'
        (which shadowed the builtin) and math.pow's floats were replaced
        by exact ints -- random.randrange expects integer bounds.
        """
        article_id = random.randrange(10 ** 8, 10 ** 9)
        while True:
            query = db.GqlQuery('SELECT * FROM NewsArticle WHERE id = :id', id = article_id)
            if not query.fetch(1):
                return article_id
            article_id += 1
            
        