from __future__ import absolute_import
from pkgr.repo import RepoGeneric
import re
import solv
import logging
import os
import urlparse
import tempfile
import pycurl
import subprocess
import sys
import progressbar

# Module-wide logger for remote repository operations; default to INFO so
# the very chatty debug output stays off unless explicitly enabled.
_LOGGER_NAME = 'pkgr.repos.Remote'
logger = logging.getLogger(_LOGGER_NAME)
logger.setLevel(logging.INFO)
class MaxDownloadAttempts(Exception):
    """Raised when a repo has cycled through more mirrors than the
    allowed retry threshold."""

import bz2
def decompress_chunked(source, dest, ztype):
    """ Decompresses source into dest in ~1000KB chunks.

        Args:
            source(str): path of the compressed input file.
            dest(str): path the decompressed data is written to.
            ztype(str): compression type; only 'bz2' is currently supported.

        Raises:
            ValueError: for an unsupported ztype.  Previously an unknown
                type fell through and crashed later with a NameError on
                s_fn; fail explicitly instead.
            OSError/IOError: when writing to dest fails.
    """
    if ztype == 'bz2':
        s_fn = bz2.BZ2File(source, 'r')
    else:
        raise ValueError("%s compression not available" % ztype)

    read = 0
    try:
        # Binary mode: the decompressed payload is arbitrary bytes, not text
        # (text mode corrupts it on some platforms / Python versions).
        destination = open(dest, 'wb')
        try:
            while True:
                try:
                    data = s_fn.read(1024000)
                except IOError:
                    # BZ2File can raise on trailing garbage after the
                    # compressed stream; treated as end-of-data, as before.
                    break
                if not data:
                    break
                read += len(data)
                destination.write(data)
        finally:
            destination.close()
    finally:
        s_fn.close()

    # Fetch the module logger by name so this helper stays self-contained.
    logging.getLogger('pkgr.repos.Remote').debug('Decompressed %s bytes', read)

class RepoRemote(RepoGeneric):
    """ A repository whose metadata and packages are fetched over HTTP(S),
        optionally through a list of mirrors resolved from a metalink or
        mirrorlist url.

        Downloads loop over the mirror list and retry on download or
        checksum failures, giving up after a fixed number of mirror
        advances (see _move_to_next_mirror()).
    """

    # Lazily created generator over self.mirrors.
    _mirror_gen = None
    # How many times we advanced to another mirror; bounded at 10.
    _nincs = 0

    # (type, url) tuple set via set_url(); type is 'url', 'mirrorlist'
    # or 'metalink'.
    _url = None
    # List of mirror base urls, filled by load_url_list().
    mirrors = None

    def set_url(self, type, url):
        """ Sets the url.

            Args:
                type(str): One of url, mirrorlist or metalink.
                url(str): The url
        """
        self._url = (type, url)

    def get_url(self):
        """ Returns this repos base url.

            Raises:
                Exception: if no url was set yet.
        """
        if self._url is None:
            raise Exception("Url for repo %s isnt set yet " % self)

        return self._url[1]

    def get_urltype(self):
        """ Returns this repos base url type (url, mirrorlist or metalink).

            Raises:
                Exception: if no url was set yet.
        """
        if self._url is None:
            raise Exception("Url for repo %s isnt set yet " % self)

        return self._url[0]

    def _get_mirrorgenerator(self):
        """ Returns the cached generator over the mirror urls. """
        if not self._mirror_gen:
            self._mirror_gen = self._mirrorgenerator()
        return self._mirror_gen

    def _mirrorgenerator(self):
        """ Yields each mirror url; yields nothing when no mirrors are set. """
        if not self.mirrors:
            # Plain 'return' instead of 'raise StopIteration': under PEP 479
            # an explicit StopIteration inside a generator becomes a
            # RuntimeError.  Behavior (empty generator) is unchanged.
            return
        for mirror in self.mirrors:
            yield mirror

    # Url of the mirror currently in use; None until first resolved.
    _current_mirror = None

    def _next_mirror(self):
        """ Returns the mirror currently in use, resolving the first mirror
            lazily.  Returns None when no mirrors are available.
        """
        if self._current_mirror is None:
            try:
                # next() builtin instead of gen.next() (works on 2.6+ and 3.x)
                self._current_mirror = next(self._get_mirrorgenerator())
            except StopIteration:
                self._current_mirror = None
        return self._current_mirror

    def _move_to_next_mirror(self):
        """ Advances to the next mirror in the list.

            Raises:
                MaxDownloadAttempts: after more than 10 advances for this repo.
        """
        if self._nincs > 10:
            raise MaxDownloadAttempts("incremented mirrors for repo greater than thresh %s" % self)
        try:
            self._current_mirror = next(self._get_mirrorgenerator())
        except StopIteration:
            self._current_mirror = None
        self._nincs += 1

    def load_url_list(self, urls):
        """ Stores the mirror url list for this repo.  Empty/None is ignored. """
        if not urls:
            return

        logger.debug('%s: Added %s mirror urls', self, len(urls))
        self.mirrors = urls

        url = urls[0]
        # Shorten the url to a scheme+host-ish prefix, for logging only.
        short = re.sub(r'^(.*?/...*?)/.*$', r'\1', url)
        logger.debug('%s: Using mirror: %s', self, short)

    def has_mirrors(self):
        """ Returns True once a mirror list was loaded. """
        return self.mirrors is not None

    def load_metalink(self, metalink):
        """ Downloads a metalink file and extracts mirror urls from it.

            Args:
                metalink(str): url of the metalink file.

            Returns:
                solv.Chksum of repodata/repomd.xml as announced by the
                metalink, or None when no urls were found.

            Raises:
                Exception: when the metalink could not be downloaded.
        """
        logger.debug('%s: Load metalink %s', self, metalink)

        nf = self.downloadnomirror(metalink, False, None)
        if not nf:
            # (unreachable 'return None' after this raise was removed)
            raise Exception("Downloading metalink failed %s" % metalink)
        f = os.fdopen(os.dup(solv.xfileno(nf)), 'r')
        solv.xfclose(nf)
        urls = []
        chksum = None
        for l in f.readlines():
            l = l.strip()
            m = re.match(r'^<hash type="sha256">([0-9a-fA-F]{64})</hash>', l)
            if m:
                chksum = solv.Chksum(solv.REPOKEY_TYPE_SHA256, m.group(1))
            m = re.match(r'^<url.*>(https?://.+)repodata/repomd.xml</url>', l)
            if m:
                urls.append(m.group(1))
        if not urls:
            chksum = None       # in case the metalink is about a different file
        f.close()
        self.load_url_list(urls)
        return chksum

    def load_mirrorlist(self, mirrorlist):
        """ Downloads a mirrorlist (one url per line) and stores its urls. """
        logger.debug('Set from mirrorlist %s', mirrorlist)
        nf = self.downloadnomirror(mirrorlist, False, None)
        if not nf:
            return
        f = os.fdopen(os.dup(solv.xfileno(nf)), 'r')
        solv.xfclose(nf)
        urls = []
        for l in f.readlines():
            l = l.strip()
            if l.startswith(('http://', 'https://')):
                urls.append(l)
        self.load_url_list(urls)
        f.close()

    def downloadnomirror(self, file, uncompress, chksum, markincomplete=False):
        """ Downloads a single url without the mirror retry loop; used for
            metalinks and mirrorlists.

            Args:
                file(str): the full url to fetch.
                uncompress(bool): passes the filename to solv so it
                    transparently uncompresses based on the extension.
                chksum: expected solv.Chksum, or None to skip verification.
                markincomplete(bool): unused here; kept for interface
                    symmetry with download().

            Returns:
                A solv file handle, or None when nothing was downloaded.

            Raises:
                Exception: on curl errors or checksum mismatch.
        """
        logger.debug('NOMIRROR Download %s. Decompress: %s. Checksum: %s. Mark incomplete: %s', file, uncompress, chksum, markincomplete)
        url = file
        logger.debug('Start download %s...', url)

        f = tempfile.TemporaryFile()
        st = subprocess.call(['curl', '-f', '-s', '-L', url], stdout=f.fileno())
        # NOTE(review): an empty download is only treated as "nothing there"
        # when curl succeeded or no checksum was expected -- kept as-is.
        if os.lseek(f.fileno(), 0, os.SEEK_CUR) == 0 and (st == 0 or not chksum):
            return None
        os.lseek(f.fileno(), 0, os.SEEK_SET)
        if st:
            raise Exception("%s: download error %d" % (file, st))
        if chksum:
            fchksum = solv.Chksum(chksum.type)
            if not fchksum:
                raise Exception("%s: unknown checksum type" % file)
            fchksum.add_fd(f.fileno())
            if fchksum != chksum:
                raise Exception("%s: checksum mismatch" % file)
        if uncompress:
            return solv.xfopen_fd(file, os.dup(f.fileno()))
        return solv.xfopen_fd(None, os.dup(f.fileno()))

    def download_repo_file(self, url, checksum=None, location=None):
        """ Downloads a file to location, retrying on other mirrors.

            Args:
                url: path of the file relative to the mirror base url.
                checksum: expected solv.Chksum; on mismatch the download is
                    retried from the next mirror.
                location: path to store the file at; defaults to a named
                    temporary file.

            Returns:
                The open file object, positioned at the start.

            Raises:
                Exception: when no mirror/baseurl is available.
                MaxDownloadAttempts: when all mirror retries are exhausted.
        """
        current_mirror = self._get_current_mirror()
        # Check before urlsplit: urlsplit(None) would crash with an
        # unhelpful AttributeError instead of the intended message.
        if not current_mirror:
            raise Exception("%s: no baseurl" % self.get_name())

        mirrorhost = urlparse.urlsplit(current_mirror).hostname
        logger.debug('%s: download file %s [%s]', self, url, mirrorhost)
        logger.log(5, 'Url: %s', url)
        logger.log(5, 'Expected checksum: %s', checksum)

        url = os.path.join(current_mirror, url)

        while True:
            current_mirror = self._next_mirror()

            # 'w+b' (was the malformed 'wb+r' / 'r+wb'): read-write binary,
            # truncating any previous content.
            if location:
                f = open(location, 'w+b')
            else:
                f = tempfile.NamedTemporaryFile(mode='w+b')
            try:
                downloaded = self._download_curl(url, f)
                if not downloaded:
                    raise Exception("unknown download error")
            except Exception as e:
                logger.warn('Download error %s %s', url, e)
                f.close()   # don't leak the handle while retrying
                self._move_to_next_mirror()
                logger.warn('Trying next mirror %s', self._next_mirror())
                continue

            f.seek(0)
            if checksum:
                fchksum = solv.Chksum(checksum.type)
                if not fchksum:
                    # (previously logged the 'file' builtin by accident)
                    logger.warn('unknown checksum type. from mirror: %s.... %s', url, location)
                    f.close()
                    self._move_to_next_mirror()
                    logger.warn('Trying next mirror %s', self._next_mirror())
                    continue
                fchksum.add_fd(f.fileno())
                if fchksum != checksum:
                    logger.warn('checksum mismatch. from mirror: %s %s != %s... %s', url, fchksum, checksum, location)
                    f.close()
                    self._move_to_next_mirror()
                    logger.warn('Trying next mirror %s', self._next_mirror())
                    continue

            return f

    def _get_current_mirror(self):
        """ Returns the current mirror url, loading the mirror list first
            if necessary (from metalink, mirrorlist or the plain url).
        """
        if not self.has_mirrors():
            logger.debug('Repo %s has no mirrors.', self.get_name())
            if self.get_urltype() == "metalink":
                self.load_metalink(self.get_url())
                # XXX: the chksum announced by the metalink is deliberately
                # not used -- it was returning errors (web caches?)
            elif self.get_urltype() == "mirrorlist":
                self.load_mirrorlist(self.get_url())
            elif self.get_urltype() == "url":
                self.load_url_list([self.get_url()])

        return self._next_mirror()

    def download_simple(self, file, uncompress):
        """ Downloads a file to a named temporary file on disk.

            Args:
                file(str): path relative to the mirror base url.
                uncompress(bool): when True the download is bz2-decompressed
                    and the decompressed path returned.

            Returns:
                Path of the downloaded (and possibly decompressed) file;
                the caller is responsible for removing it.

            Raises:
                Exception: when no mirror/baseurl is available.
                MaxDownloadAttempts: when all mirror retries are exhausted.
        """
        current_mirror = self._get_current_mirror()
        # Check before urlsplit (see download_repo_file()).
        if not current_mirror:
            raise Exception("%s: no baseurl" % self.get_name())

        mirrorhost = urlparse.urlsplit(current_mirror).hostname
        logger.debug('Download SIMPLE %s mirror %s', file, mirrorhost)

        url = os.path.join(current_mirror, file)

        while True:
            logger.debug('Start download %s...', url)
            current_mirror = self._next_mirror()

            f = tempfile.NamedTemporaryFile(mode="wb", delete=False)
            downloaded = self._download_curl(url, f)

            if not downloaded:
                # (previously printed/logged an undefined 'st' here, which
                # raised NameError on the failure path)
                logger.warn('Download error %s', url)
                f.close()
                os.unlink(f.name)   # drop the stale delete=False temp file
                self._move_to_next_mirror()
                logger.warn('Trying next mirror %s', self._next_mirror())
                continue

            # Close first so buffered data is flushed to disk before anyone
            # reads the file back via f.name.
            f.close()

            if uncompress:
                dest = f.name.rsplit('.', 1)[0]
                decompress_chunked(f.name, dest, 'bz2')
                return dest

            return f.name

    def xlat_filehandle(self, f, file=None, uncompress=None):
        """ Translates a python file object into a solv file handle.

            TODO: move to pkgr.Pkgr

            Args:
                f: open python file object.
                file: filename hint handed to solv when uncompressing.
                uncompress: when true (and file given) solv uncompresses
                    transparently based on the filename extension.
        """
        if uncompress and file:
            return solv.xfopen_fd(file, os.dup(f.fileno()))
        return solv.xfopen_fd(None, os.dup(f.fileno()))

    def _default_start_handler(self, repo, url):
        """ Demo start handler driving a dummy progress bar.

            NOTE(review): appears unused; it previously referenced an
            undefined 'widgets' name and raised NameError when called.
        """
        widgets = [progressbar.Percentage(), ' ', progressbar.Bar()]
        pbar = progressbar.ProgressBar(widgets=widgets, maxval=10000000).start()
        for i in range(1000000):
            pbar.update(10 * i + 1)
        pbar.finish()

    # Progressbar instance for the download currently in flight.
    pbar = None

    def _progress(self, url, download_t, download_d, upload_t, upload_d):
        """ pycurl progress callback: creates/updates a progress bar for the
            running download and tears it down on completion.
        """
        if not self.pbar and download_t > 0 and download_d != download_t:
            urlsplit = urlparse.urlsplit(url)
            basename = os.path.basename(urlsplit.path)
            widgets = [basename, progressbar.Percentage(), ' ', progressbar.Bar(),
                       ' ', progressbar.ETA(), ' ', progressbar.FileTransferSpeed()]
            self.pbar = progressbar.ProgressBar(widgets=widgets, maxval=download_t, term_width=80).start()
        if self.pbar and download_d < self.pbar.maxval:
            self.pbar.update(download_d)
        elif self.pbar and (download_d >= self.pbar.maxval or download_d == download_t):
            self.pbar.finish()
            self.pbar = None

    def _download_curl(self, url, fp):
        """ Downloads url into the open file object fp using pycurl.

            Returns:
                True on success, False on any error (traceback goes to
                stderr).
        """
        logger.info('Downloading using curl..')

        def writer(buf):
            fp.write(buf)

        self.current_url = url

        # Wrapper so the instance callback also receives the url.
        def _progress(download_t, download_d, upload_t, upload_d):
            self._progress(url, download_t, download_d, upload_t, upload_d)

        curl = pycurl.Curl()
        curl.setopt(pycurl.URL, url)
        curl.setopt(pycurl.FOLLOWLOCATION, 1)
        curl.setopt(pycurl.MAXREDIRS, 5)
        curl.setopt(pycurl.CONNECTTIMEOUT, 30)
        curl.setopt(pycurl.TIMEOUT, 300)
        # NOSIGNAL makes downloading thread-safe (no SIGALRM-based timeouts).
        curl.setopt(pycurl.NOSIGNAL, 1)
        curl.setopt(pycurl.NOPROGRESS, 0)
        curl.setopt(pycurl.PROGRESSFUNCTION, _progress)
        curl.setopt(pycurl.WRITEFUNCTION, writer)

        try:
            curl.perform()
            return True
        except Exception:
            # Was a bare 'except:', which would also swallow
            # KeyboardInterrupt/SystemExit.
            import traceback
            traceback.print_exc(file=sys.stderr)
            return False
        finally:
            curl.close()

    def download(self, file, uncompress, chksum, markincomplete=False):
        """ Downloads a file via the curl binary, retrying on other mirrors
            on download or checksum errors.

            Args:
                file(str): path relative to the mirror base url.
                uncompress(bool): passes the filename to solv so it
                    uncompresses transparently based on the extension.
                chksum: expected solv.Chksum, or None to skip verification.
                markincomplete(bool): unused; kept for interface
                    compatibility.

            Returns:
                A solv file handle for the downloaded data.

            Raises:
                Exception: no baseurl, or curl returned no data.
                MaxDownloadAttempts: when all mirror retries are exhausted.
        """
        logger.debug('Download CLASSIC %s. Decompress: %s. Checksum: %s. Mark incomplete: %s', file, uncompress, chksum, markincomplete)

        current_mirror = self._get_current_mirror()

        current_mirror = self._next_mirror()

        if not current_mirror:
            raise Exception("%s: no baseurl" % self.get_name())

        url = os.path.join(current_mirror, file)

        while True:
            logger.debug('Start download %s...', url)
            current_mirror = self._next_mirror()

            f = tempfile.TemporaryFile()
            st = subprocess.call(['curl', '-f', '-s', '-L', url], stdout=f.fileno())
            # NOTE(review): empty output only raises when curl succeeded or
            # no checksum was expected (mirrors downloadnomirror()).
            if os.lseek(f.fileno(), 0, os.SEEK_CUR) == 0 and (st == 0 or not chksum):
                raise Exception("curl download didnt return anything")
            os.lseek(f.fileno(), 0, os.SEEK_SET)

            if st:
                logger.warn('Download error %s %s', file, st)
                self._move_to_next_mirror()
                logger.warn('Trying next mirror %s', self._next_mirror())
                continue
            if chksum:
                fchksum = solv.Chksum(chksum.type)
                if not fchksum:
                    logger.warn('unknown checksum type. from mirror: %s.... %s', url, file)
                    self._move_to_next_mirror()
                    logger.warn('Trying next mirror %s', self._next_mirror())
                    continue
                fchksum.add_fd(f.fileno())
                if fchksum != chksum:
                    logger.warn('checksum mismatch. from mirror: %s %s != %s... %s', url, fchksum, chksum, file)
                    self._move_to_next_mirror()
                    logger.warn('Trying next mirror %s', self._next_mirror())
                    continue
            if uncompress:
                return solv.xfopen_fd(file, os.dup(f.fileno()))
            return solv.xfopen_fd(None, os.dup(f.fileno()))