import functools

import metakit

from rsspackrat.main.config import DB_PATH
from rsspackrat.main.log import logger


def save(func):
    """Decorator for Storage methods: commit the instance's metakit db
    after the wrapped method runs.

    The commit is issued on the instance the method was invoked on
    (``args[0]`` is ``self``), not on the module-level ``database``
    singleton, so every Storage instance persists its own changes.
    """
    @functools.wraps(func)
    def inner(*args, **kw):
        res = func(*args, **kw)
        # args[0] is the Storage instance ("self") of the decorated method.
        args[0].db.commit()
        return res
    return inner


# Values for the integer "viewed" column of the filearchive view.
UNREAD = 0
READ = 1


class Storage(object):
    """Persistent store for feed subscriptions and archived files.

    Backed by a metakit database holding two views:

    * ``feeds``       -- subscribed feeds (title, url)
    * ``filearchive`` -- downloaded files, keyed by a content hash,
                         with an UNREAD/READ status flag
    """

    def __init__(self, storagepath):
        # Open (or create) the metakit file; the second argument (1)
        # requests read/write access.
        self.db = metakit.storage(storagepath, 1)
        self.feeds = self.db.getas("feeds[title:S,url:S]")
        filearchive_structure = ("filearchive[" +
                                    "uid:S," +          #the file hash
                                    "location:S,"+      #location on disk
                                    "url:S,"+           #original URL
                                    "feedurl:S,"+       #url of the feed
                                    "viewed:I,"+        #file is viewed
                                    "guid:S"+           #the url for the post
                                    "]"
                                    )
        self.filearchive = self.db.getas(filearchive_structure)

    def save_changes(self):
        """Flush all pending changes to disk."""
        self.db.commit()

    @save
    def addfeed(self, title, url):
        """Subscribe to a feed.

        A feed whose url is already present is left untouched, so
        subscribing twice never creates duplicate rows.
        """
        if self.feeds.find(url=url) == -1:
            self.feeds.append(title=title, url=url)

    @save
    def deletefeed(self, url):
        """Remove the feed with the given url; a no-op if it is unknown."""
        index = self.feeds.find(url=url)
        if index != -1:
            self.feeds.delete(index)

    def get_all_feeds(self):
        """Return the urls of every subscribed feed as a list."""
        return [f.url for f in self.feeds]

    def get_feeds(self):
        """Return the raw metakit view of subscribed feeds."""
        return self.feeds

    def get_feed_archive(self, feedurl, status=UNREAD):
        """Return archived entries for *feedurl*.

        With ``status=UNREAD`` only unread entries are returned; with
        ``status=READ`` every entry for the feed is returned (read and
        unread alike).  Raises ValueError for any other status.
        """
        if status == READ:
            return self.filearchive.select(feedurl=feedurl)
        if status == UNREAD:
            return self.filearchive.select(feedurl=feedurl, viewed=UNREAD)
        raise ValueError("Unknown status")

    @save
    def archive_file(self, uid, location, url, feedurl, guid):
        """Record a downloaded file; new entries start out UNREAD."""
        self.filearchive.append(uid=uid,
                                location=location,
                                url=url,
                                feedurl=feedurl,
                                guid=guid,
                                viewed=UNREAD, )

    def _find_archived(self, **criteria):
        # First archive row matching the given column criteria, or None.
        index = self.filearchive.find(**criteria)
        if index != -1:
            return self.filearchive[index]
        return None

    def get_archived_by_uid(self, uid):
        """Return the archive entry with this file hash, or None."""
        return self._find_archived(uid=uid)

    def get_archived_by_url(self, url):
        """Return the archive entry downloaded from this url, or None."""
        return self._find_archived(url=url)

    def get_archived_by_guid(self, guid):
        """Return the archive entry for this post guid, or None."""
        return self._find_archived(guid=guid)

    def get_feed_statistics(self, feedurl):
        """Return ``(total, unread)`` entry counts for one feed."""
        total = len(self.filearchive.select(feedurl=feedurl))
        unread = len(self.filearchive.select(feedurl=feedurl, viewed=UNREAD))
        return (total, unread)

    @save
    def set_viewed(self, filepath):
        """Mark the archived file stored at *filepath* as READ.

        Returns the previous ``viewed`` value, or None when no archive
        entry matches the path.
        """
        index = self.filearchive.find(location=filepath)
        if index > -1:
            previous = self.filearchive[index].viewed
            self.filearchive[index].viewed = READ
            logger.debug("Viewed status is %s for %s" % (previous, filepath))
            return previous
        return None

    @save
    def edit_feed(self, feedurl, **kw):
        """Update every feed with this url; only ``title`` is supported
        and it must be present in *kw* (KeyError otherwise)."""
        for feed in self.feeds.select(url=feedurl):
            feed.title = kw['title']

    def get_statistics(self):
        """Return ``(feeds, files, unread, read)`` global counts."""
        feeds = len(self.feeds)
        files = len(self.filearchive)
        unread = len(self.filearchive.select(viewed=UNREAD))
        read = files - unread
        return feeds, files, unread, read

    def have_feed(self, feedurl):
        """True when a feed with this url is subscribed."""
        return self.feeds.find(url=feedurl) != -1
database = Storage(DB_PATH)