# FeedFetcher - Update all feeds to local cache. GUI reads feeds from cache.
# Copyright (C) 2007 Lauri Taimila
# 
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
# 
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
# 
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA.

# Module metadata (licence/author info exposed for introspection).
__licence__ = "GPLv2"
__copyright__ = "2007, Lauri Taimila"
__author__ = "Lauri Taimila <lauri@taimila.com>"

import os
import threading
import feedparser
from time import strptime
from datetime import datetime
from pysqlite2 import dbapi2 as sqlite

# Messaging system
from backend.core.message import Message
from backend.core.message_type_priority import MessageType
from backend.core.message_bus import MessageBus

class FeedFetcher(threading.Thread):
    """Update all feeds to the local feed cache database."""

    # Number of entries to keep in cache for each feed
    NUMBER_OF_ENTRIES = 50

    # Feed database file
    FEED_DB = os.path.expanduser('~/.entertainer/cache/feed.db')

    def __init__(self, message_bus, logger, feeds):
        """
        Create a new feed fetch thread.
        @param message_bus: MessageBus object
        @param logger: Logger object
        @param feeds: List of feeds (List of URL-strings)
        """
        threading.Thread.__init__(self)
        self.setName("FeedFetcher")
        self.message_bus = message_bus
        self.logger = logger
        self.feeds = feeds

    def run(self):
        """Update feed cache in a separate thread."""
        db_conn = sqlite.connect(self.__class__.FEED_DB)
        try:
            db_cursor = db_conn.cursor()
            for feed_url in self.feeds:
                data = feedparser.parse(feed_url)
                if data.bozo:
                    self.logger.warning("Malformed feed skipped: " + feed_url)
                    continue  # move to the next feed

                last_update = self._feed_timestamp(data)
                if last_update is None:
                    continue  # couldn't date this feed, skip it

                current_update, has_new_entries = \
                    self._sync_feed_record(db_conn, db_cursor, data, last_update)

                # If this feed has new entries we update the cache.
                if has_new_entries:
                    self._store_new_entries(db_conn, db_cursor, data,
                                            current_update)
                    # Prune once per feed (not once per entry) - the final
                    # state is the same and it avoids O(entries * rows) work.
                    self._prune_old_entries(db_conn, db_cursor, data.feed.link)
        finally:
            # Always release the connection, even if a feed blows up mid-loop.
            db_conn.close()

        # Notify that feed database has been updated
        self.logger.info("Feed cache has been updated")
        self.message_bus.notifyMessage(Message(MessageType.FEED_DB_UPDATED))

    def _feed_timestamp(self, data):
        """
        Determine when the parsed feed was last updated.
        Falls back to the first entry's date when the feed itself has none.
        @param data: feedparser result object
        @return: datetime of the last update, or None if undeterminable
        """
        try:
            dt = data.feed.date_parsed
        except AttributeError:
            try:
                dt = data.entries[0].date_parsed
            except (AttributeError, IndexError):
                # No feed date and no dated entries - nothing to compare with.
                self.logger.warning(
                    "Couldn't determine feed date of " + data.feed.title)
                return None
        return datetime(dt[0], dt[1], dt[2], dt[3], dt[4], dt[5])

    def _sync_feed_record(self, db_conn, db_cursor, data, last_update):
        """
        Insert or refresh this feed's row in the 'feed' table.
        @param data: feedparser result object
        @param last_update: datetime the feed reports as its last update
        @return: tuple (previous cached update time, True if new entries exist)
        """
        db_cursor.execute("""SELECT date,time
                             FROM   feed
                             WHERE  url=:url""",
                             {"url": data.feed.link})
        result = db_cursor.fetchall()

        if len(result) == 0:
            # Completely new feed: use the epoch as the previous update time
            # so that every entry is considered new and gets cached.
            current_update = datetime(1970, 1, 1, 0, 0, 0)
            feed_row = (data.feed.link,
                        data.feed.title,
                        data.feed.subtitle,
                        data.channel.description,
                        last_update.strftime("%Y-%m-%d"),
                        last_update.strftime("%H:%M:%S"))
            db_cursor.execute("""INSERT INTO feed(url,title,subtitle,description,date,time)
                                 VALUES (?,?,?,?,?,?)""", feed_row)
            db_conn.commit()
            return current_update, True

        # Feed already cached - compare timestamps to detect new entries.
        tmp = str(result[0][0]) + " " + str(result[0][1])
        current_update = datetime(*strptime(tmp, "%Y-%m-%d %H:%M:%S")[0:6])
        if last_update > current_update:
            db_cursor.execute("UPDATE feed SET date=?,time=? WHERE url=?",
                              (last_update.strftime("%Y-%m-%d"),
                               last_update.strftime("%H:%M:%S"),
                               data.feed.link))
            db_conn.commit()
            return current_update, True
        return current_update, False

    def _store_new_entries(self, db_cursor_conn, db_cursor, data,
                           current_update):
        """
        Insert into the 'entry' table every entry newer than current_update.
        @param data: feedparser result object
        @param current_update: datetime of the previous cache update
        """
        for entry in data.entries:
            try:
                ed = entry.date_parsed
            except AttributeError:
                continue  # undated entry - can't decide if it's new, skip it
            e_timestamp = datetime(ed[0], ed[1], ed[2], ed[3], ed[4], ed[5])

            # If there is no entry ID we generate one from URL and datetime.
            try:
                entry_id = entry.id
            except AttributeError:
                entry_id = (data.feed.link + "_"
                            + e_timestamp.strftime("%Y-%m-%d") + "_"
                            + e_timestamp.strftime("%H:%M:%S"))

            # If entry is new (not cached in previous update)
            if e_timestamp > current_update:
                entry_row = (data.feed.link,
                             entry.title,
                             entry.description,
                             "NO",  # This entry hasn't been read yet
                             entry_id,  # use the fallback too, not entry.id
                             e_timestamp.strftime("%Y-%m-%d"),
                             e_timestamp.strftime("%H:%M:%S"))
                db_cursor.execute("""INSERT INTO entry(feed_url,title,description,isread,id,date,time)
                                     VALUES (?,?,?,?,?,?,?)""", entry_row)
        db_cursor_conn.commit()

    def _prune_old_entries(self, db_conn, db_cursor, feed_url):
        """
        Remove oldest entries of a feed so that at most NUMBER_OF_ENTRIES
        newest entries remain in the cache.
        @param feed_url: URL identifying the feed (entry.feed_url column)
        """
        limit = self.__class__.NUMBER_OF_ENTRIES
        db_cursor.execute("""SELECT date, time
                             FROM entry
                             WHERE feed_url=:url
                             ORDER BY date, time""",
                             {"url": feed_url})
        result = db_cursor.fetchall()
        if len(result) > limit:
            # Ascending order: the cutoff is the oldest row we want to KEEP,
            # i.e. the one 'limit' positions from the end.
            limit_date, limit_time = result[len(result) - limit]
            # A row is older than the cutoff when its date is earlier, or the
            # date matches and its time-of-day is earlier. The previous
            # 'date < d AND time < t' kept older-date rows with later times.
            db_cursor.execute("""DELETE FROM entry
                                 WHERE feed_url=:url
                                 AND (date < :date
                                      OR (date = :date AND time < :time))""",
                                 {"url": str(feed_url),
                                  "date": str(limit_date),
                                  "time": str(limit_time)})
            db_conn.commit()
