import logging
import os
import shutil
from urllib import FancyURLopener
import urlparse
import socket
from oriolustrotter.core import Fetcher, FetchError

class FetchAbortedError(FetchError):
    """
    Raised when a transfer breaks mid-download.  The partially downloaded
    ".tmp" file is left on disk so that a later attempt can resume it
    (see PersistentFetcher.get, which retries on this exception).
    """
    pass

class PersistentFetcher(Fetcher):
    """
    Fetcher with ability to resume downloading of a file that has been partially downloaded from a web server.
    This fetcher uses range header.
    """

    class __ModifiedURLOpener(FancyURLopener):
        """

        """
        def http_error_206(self, url, fp, errcode, errmsg, headers, data=None):
            pass

        def prompt_user_passwd():
            pass
            
        def http_error_404(self, url, fp, errcode, errmsg, headers, data=None):
            raise FetchError("File '%s' not found!" % url) 

    def __init__(self, cache, timeout = 60):
        Fetcher.__init__(self, cache)
        self.logger = logging.getLogger("PersistentFetcher")
        self.logger.debug("init fetcher: cache - '%s', timeout = %d", self.cache, timeout)
        socket.setdefaulttimeout(timeout)

    def __get(self, url):
        opener = self.__ModifiedURLOpener()
        tmp_file = None
        remote_file = None

        url_parts = urlparse.urlparse(url)

        filename = os.path.basename(url_parts.path)
        cached_filename = os.path.join(self.cache, filename + ".tmp")
        output_filename = os.path.join(self.cache, filename)
        self.logger.debug("cached_filename = %s", cached_filename)
        self.logger.debug("output_filename = %s", output_filename)
        try:
            if os.path.exists(cached_filename) and url_parts.scheme == "http":
                exist_size = os.path.getsize(cached_filename)
                tmp_file = open(cached_filename, "ab")
            else:
                tmp_file = open(cached_filename, "wb")
                exist_size = 0

            opener.addheader("Range", "bytes=%s-" % exist_size)
            total_size = 0
            try:
                remote_file = opener.open(url)
                self.logger.info("Open URL '%s'." % url)
            except IOError, (errno, strerror):
                message = "Unable to open URL '%s'." % url
                self.__logerror__(self.logger, message)
                raise FetchError(message)

            if int(remote_file.headers["Content-Length"]) != exist_size:
                while True:
                    try:
                        data = remote_file.read(8192)
                    except:
                        message = "Unable to read data from remote server. Readed %d bytes." % total_size
                        self.__logerror__(self.logger, message)
                        raise FetchAbortedError(message)

                    total_size = total_size + len(data)
                    if not data:
                        break
                    try:
                        tmp_file.write(data)
                    except IOError, (errno, strerror):
                        message = "Unable to write data to local file: %s; errno = %s: strerror = %s" % (output_filename, errno, strerror)
                        self.__logerror__(self.logger, message)
                        raise FetchError(message)
        finally:
            if tmp_file:
                tmp_file.close()
            if remote_file:
                remote_file.close()

        self.logger.info("Total bytes read: %s", total_size)
        shutil.move(cached_filename, output_filename)

        return output_filename

    def get(self, url, login = None, password = None):
        # TODO Implement login and password to access to repository
        for i in range(self.ATTEMPTS):
            try:
                self.logger.debug("Starting attempt #%s" % i)
                return self.__get(url)
            except FetchAbortedError:
                self.logger.debug("Attempt #%s failed." % i)
                continue
            except:
                raise
        raise FetchError("Unable to fetch file.")    