from __future__ import absolute_import

import datetime
import logging
import os
import sqlite3

from urlgrabber.grabber import URLGrabber, URLGrabError

from pkgr.repository.database import RepositoryDatabase
from pkgr.repository.groups import Groups
from pkgr.repository.metalink import MetaLink
from pkgr.repository.repomd import RepositoryMetadata
from pkgr.util import decompress_chunked
from pkgr.util import mkdir_p
logger = logging.getLogger('pkgr.repository.repository')

import os
def touch(fname):
    """Create `fname` if it does not exist, otherwise update its mtime to now.

    Mirrors the behaviour of touch(1); used to timestamp freshly
    downloaded cache files.
    """
    if os.path.exists(fname):
        os.utime(fname, None)
    else:
        # `with` guarantees the new empty file's handle is closed.
        with open(fname, 'w'):
            pass
        
class Repository(object):
    """A package repository: mirror list plus cached, parsed metadata.

    Lifecycle: construct with a name, configure via set_url() /
    set_cachedir() / set_expires(), then call init() to resolve mirrors,
    refresh repomd.xml when stale, and load the primary package database
    and (comps) group data.
    """

    # Class-level defaults; instances shadow these once configured.
    _copy_local = True      # passed to urlgrabber: copy file:// URLs into cache
    _url = None             # configured base/metalink URL
    _url_type = None        # e.g. "metalink" or a plain base URL type
    _repomd = None          # unused stub storage, see get_repomd()
    _cache_dir = None       # root directory for this repository's cache
    _expires = None         # datetime.timedelta cache lifetime, or None
    current_mirror = None   # mirror currently used for metadata downloads
    metadata = None         # RepositoryMetadata, populated by init()
    database = None         # RepositoryDatabase, populated by init()
    groups = None           # Groups, populated by init()
    groups_enabled = False

    def __init__(self, name):
        self.name = name
        self.mirrors = []

    def set_url(self, url, type):
        """Set the repository URL and its type (e.g. "metalink")."""
        self._url = url
        self._url_type = type

    def get_url(self):
        return self._url

    def add_mirror(self, url):
        """Append a mirror base URL; the first mirror added becomes current."""
        self.mirrors.append(url)
        if not self.current_mirror:
            self.current_mirror = url

    def get_mirrors(self):
        return self.mirrors

    def get_current_mirror(self):
        return self.current_mirror

    def set_cachedir(self, path):
        self._cache_dir = path

    def join_cache_dir(self, *args):
        """Join path components onto this repository's cache directory."""
        return os.path.join(self._cache_dir, *args)

    def set_expires(self, timedelta):
        """Set the cache lifetime as a datetime.timedelta."""
        self._expires = timedelta

    def get_expires(self):
        return self._expires

    def download(self, url, filename=None):
        """Download `url` to `filename` and return the local path.

        The result's mtime is refreshed with touch() so that expiry
        checks (is_expired) measure from the time of download.
        """
        # prog = urlgrabber.progress.text_progress_meter()
        # g = URLGrabber(progress_obj=prog)
        grabber = URLGrabber()
        result = grabber.urlgrab(url, filename=filename,
                                 copy_local=1 if self._copy_local else 0)
        touch(result)
        return result

    def get_next_mirror(self):
        """Yield each configured mirror in order.

        Raises Exception once the mirror list is exhausted, so a caller
        iterating this generator fails loudly instead of looping forever.
        """
        for mirror in self.mirrors:
            yield mirror
        raise Exception("No more mirrors to try for repo %s" % self.get_name())

    def get_urltype(self):
        return self._url_type

    def get_name(self):
        return self.name

    def set_enable_groups(self, b):
        self.groups_enabled = b

    def get_groups_enabled(self):
        return self.groups_enabled

    def get_repomd(self):
        # Unfinished stub: always returns None. Callers should use
        # self.metadata, which init() populates.
        if self._repomd is None:
            pass

    def is_expired(self):
        """Return True when the cached repomd.xml is missing, no expiry
        interval is configured, or the cache is older than that interval."""
        cache_file = self.join_cache_dir('repomd.xml')

        if not os.path.exists(cache_file):
            return True

        expiretd = self.get_expires()
        if expiretd is None:
            # No lifetime configured: treat the cache as always stale.
            return True

        now = datetime.datetime.now()
        then = datetime.datetime.fromtimestamp(os.path.getmtime(cache_file))
        age = (now - then).total_seconds()
        limit = expiretd.total_seconds()

        if age > limit:
            logger.log(5, 'Repomd expired by %s seconds. Expiration: %s',
                       age - limit, str(expiretd))
            return True

        logger.log(5, 'Repomd not expired. Expiration: %s. %s to go.',
                   str(expiretd), datetime.timedelta(seconds=limit - age))
        return False

    def init(self):
        """Resolve mirrors, refresh repomd.xml if stale, then load the
        primary package database and group (comps) metadata.

        Raises Exception when every mirror fails (via get_next_mirror)
        or when the cached sqlite database is corrupt.
        """
        # Make sure the cache directory exists.
        mkdir_p(self.join_cache_dir())

        if self.get_urltype() == "metalink":
            logger.debug("Downloading repo metalink for %s", self.get_name())

            metalinkcache = self.join_cache_dir('metalink.xml')

            # TODO: expiry check for the metalink itself; for now it is
            # only fetched when missing from the cache.
            if not os.path.exists(metalinkcache):
                self.download(self.get_url(), metalinkcache)
                logger.debug('Got metalink %s', metalinkcache)

            with open(metalinkcache) as f:
                ml = MetaLink(f.read())

            for url in ml.get_urls():
                # Metalink URLs point at .../repodata/repomd.xml;
                # strip two components to keep the repository base.
                self.add_mirror(os.path.dirname(os.path.dirname(url)))
        else:
            self.add_mirror(self.get_url())

        repomddata = repomdcache = self.join_cache_dir('repomd.xml')

        if self.is_expired():
            # Try mirrors until one serves repomd.xml; get_next_mirror()
            # raises once the list is exhausted.
            for mirrorbase in self.get_next_mirror():
                repomdurl = mirrorbase + '/repodata/repomd.xml'
                try:
                    logger.debug('Trying next mirror %s', mirrorbase)
                    repomddata = self.download(repomdurl, repomdcache)
                    logger.debug('Downloaded repo metadata to %s' % repomddata)
                    break
                except URLGrabError as e:
                    logger.error("Error downloading repo md from %s" % repomdurl)
                    logger.error(e)

        repomd = self.metadata = RepositoryMetadata()
        self.metadata.load_file(repomddata)

        # With repomd parsed, fetch the data files it references:
        # the bz2-compressed sqlite primary_db and the plain group XML.
        metadatafiles = {}
        for compressed, md in [(True, 'primary_db'), (False, 'group')]:
            metadatafiles[md] = self.get_metadata(repomd, md, compressed)

        self.database = RepositoryDatabase(self, repomd)
        try:
            self.database.load_primarydb(metadatafiles['primary_db'])
        except sqlite3.DatabaseError:
            # Corrupt cached database; the user must clear the cache.
            raise Exception("Database error. Please run `pkgr clean`.")

        with open(metadatafiles['group']) as f:
            self.groups = Groups.from_xml(f.read())

    def get_metadata(self, repomd, type, decompress):
        """Ensure the metadata file of `type` (e.g. 'primary_db', 'group')
        is present in the cache and return the path to the usable file,
        decompressing (bz2) first when `decompress` is true."""
        mdfile = repomd.get_data_file(type)
        localmdcache = self.join_cache_dir(mdfile)
        mdurl = self.get_current_mirror() + '/' + mdfile

        # TODO: honour the expiry interval here too; currently the cache
        # is only refreshed when the file is missing entirely.
        expired = not os.path.exists(localmdcache)

        destdir = os.path.dirname(localmdcache)
        if not os.path.exists(destdir):
            mkdir_p(destdir)

        if expired:
            logger.info('Local cache of %s (%s) expired. Downloading %s',
                        type, localmdcache, mdurl)
            # TODO: loop attempts at getting the metadata
            self.download(mdurl, localmdcache)

        if decompress:
            # Strip the compression suffix (e.g. ".bz2") for the output name.
            dest = localmdcache.rsplit('.', 1)[0]
            logger.log(5, 'Decompressing to %s %s', localmdcache, dest)

            if not os.path.exists(dest):
                decompress_chunked(localmdcache, dest, 'bz2')
        else:
            dest = localmdcache

        return dest
        
# sqlite3 is referenced by Repository.init() (sqlite3.DatabaseError).
# Importing it here, after the class definition, still works because the
# module top level runs before any method is called — but a top-of-file
# import would be clearer.
import sqlite3
