################################################################################
#   Copyright 2010 Jason Hoover
#
#
#   This file is part of AutoCache.
#
#   AutoCache is free software: you can redistribute it and/or modify
#   it under the terms of the GNU Lesser General Public License as published by
#   the Free Software Foundation, either version 3 of the License, or
#   (at your option) any later version.
#
#   AutoCache is distributed in the hope that it will be useful,
#   but WITHOUT ANY WARRANTY; without even the implied warranty of
#   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#   GNU General Public License for more details.

#   You should have received a copy of the GNU Lesser General Public License
#   along with AutoCache.  If not, see <http://www.gnu.org/licenses/>.
################################################################################


################################################################################
# ac_download - Sets up the ac_download function.
#
# The function of this file is to define a function named ac_download, which
# takes three arguments: source URL, download_dir and the download_fn.
# These arguments are so-named from the original ac_update function.
# download_dir should have a trailing slash in it.
#
# Function Name   Description           Usage                            Returns
#
# ac_download     Download executor     ac_download (url,path,filename)  Nothing
# ac_dlcmd        Download command      ac_dlcmd (url,path,filename)     Nothing
################################################################################

from subprocess import call
from time import sleep

from ac_common import ac_settings,ac_log,ac_mknice

################################################################################
# ac_dlcmd    - Set up the ac_dlcmd function. It's really just a stub function
# put in place to prevent duplication of code.
################################################################################

if ac_settings["dl_program"] == "wget":
 
    # ([
    #     <path to wget>,
    #     <specify a destination filename, just to be safe.>
    #     <The rate limit in kb's. Zero implies no rate limit.>
    #     <The output logfile for wget.>
    #     <The dl_timeout value.>
    #     <--tries=0: wget treats 0 as "retry forever", not "no retries">
    #     <--continue: resume a partial file>
    #     <The URL>
    # ]
    # <ac_mknice, to make wget have a niceness>
    # <env variables, to set proxy>
    # )

    def ac_dlcmd (url, download_dir, download_fn):
        """Run wget to fetch *url* into download_dir/download_fn.

        Returns wget's exit status so ac_download() can compare it
        against ac_settings["dl_tempfail"] and decide whether to retry.
        """
        destination = download_dir + download_fn
        logfile = download_dir + "download.log"
        argv = [
            ac_settings["dl_exec"],
            "--output-document=%s" % destination,
            "--limit-rate=%sk" % ac_settings["dl_bwlimit"],
            "--output-file=%s" % logfile,
            "--timeout=%s" % ac_settings["dl_timeout"],
            "--tries=0",
            "--continue",
            url,
        ]
        # Run wget reniced; the proxy (if any) comes from the autocache
        # settings rather than the caller's environment.
        return call(argv,
                    preexec_fn = ac_mknice,
                    env = { "http_proxy" : ac_settings["dl_proxy"] })

    # Exit codes for wget.
    # 1 - Generic Failure.
    # 2 - Parse Error
    # 3 - File I/O
    # 4 - Network Failure.
    # 5 - SSL Verification Failure
    # 6 - Username/password failure.
    # 7 - Protocol errors (?)
    # 8 - Server issued error response.

    ac_settings["dl_tempfail"]=(4,7,8)

elif ac_settings["dl_program"] == "curl":

    # Curl version.
    # (  [
    #    <path to curl>,
    #    <specify a destination filename, just to be safe.>
    #    <The rate limit in kb's. Zero implies no rate limit.>
    #    <The output logfile for curl.>
    #    <The dl_timeout value.>
    #    <Auto-continue a partial download>
    #    <The URL>
    #    ]
    #    <ac_mknice, to make curl have a niceness>
    #    <env variables, to set proxy>
    # )

    def ac_dlcmd (url, download_dir, download_fn):
        """Run curl to fetch *url* into download_dir/download_fn.

        Returns curl's exit status; ac_download() compares it against
        ac_settings["dl_tempfail"] to decide whether to retry.
        """
        # NOTE: every option and its value must be separate argv entries.
        # The previous version glued them together with a space
        # ("--output /path/file"), which curl rejects as a single unknown
        # option instead of an option plus argument.
        return call(
            [
                ac_settings["dl_exec"],
                "--output", download_dir + download_fn,
                "--limit-rate", str(ac_settings["dl_bwlimit"]) + "k",
                "--stderr", download_dir + "download.log",
                "--connect-timeout", str(ac_settings["dl_timeout"]),
                "--continue-at", "-",
                url,
            ],
            preexec_fn = ac_mknice,
            env = { "http_proxy" : ac_settings["dl_proxy"] }
            )

    # Because curl has 88(!) return codes, only semi-temporary network related
    # failures are documented/defined here.
    #
    # 5 - Couldn't resolve proxy. The given proxy host could not be resolved.
    # 6 - Couldn't resolve host. The given remote host was not resolved.
    # 7 - Failed to connect to host.
    # 18 - Partial file. Only a part of the file was transferred.	
    # 22 - HTTP page not retrieved. The requested url was not found or returned 
    #      another error with the HTTP error code being 400 or above. This 
    #      return code only appears if -f/--fail is used.
    # 28 - Operation timeout. The specified time-out period was reached
    #      according to the conditions.
    # 33 - HTTP range error. The range "command" didn't work.
    # 45 - Interface error. A specified outgoing interface could not be used.
    # 47 - Too many redirects. When following redirects, curl hit the maximum 
    #      amount.
    # 52 - The server didn't reply anything, which here is considered an error.
    # 55 - Failed sending network data.
    # 56 - Failure in receiving network data.
    # 61 - Unrecognized transfer encoding.
    # 78 - The resource referenced in the URL does not exist.

    ac_settings["dl_tempfail"]=(5,6,7,18,22,28,33,45,47,52,55,56,61,78)


elif ac_settings["dl_program"] == "fetch":

    # Fetch version.
    #      [
    #      <path to fetch>,
    #      <specify output>,
    #      <specify timeout>,
    #      <resume partial files>
    #      <url>
    #      ]
    #    <log output>
    #    <make nice>
    #    <proxy>
    # Minor notes about fetch:
    #    - No throttling.
    #    - stderr has to have a file descriptor redirect.

    def ac_dlcmd (url, download_dir, download_fn):
        """Run fetch(1) to download *url* into download_dir/download_fn.

        fetch has no bandwidth throttling and reports progress on stderr,
        so stderr is redirected into download.log here.  Returns fetch's
        exit status.
        """
        # Fixes two defects in the original:
        #  - flags and their values were joined into one argv element
        #    ("-o /path/file"), which fetch cannot parse;
        #  - the log was opened with the Python-2-only file() builtin and
        #    an invalid mode string ('a+w'), and the handle was leaked.
        logfile = open(download_dir + "download.log", "a")
        try:
            return call(
                [
                    ac_settings["dl_exec"],
                    "-o", download_dir + download_fn,
                    "-T", str(ac_settings["dl_timeout"]),
                    "-r",
                    url,
                ],
                stderr = logfile,
                preexec_fn = ac_mknice,
                env = { "http_proxy" : ac_settings["dl_proxy"] }
                )
        finally:
            logfile.close()

    # Unfortunately, fetch only has two return codes, success or failure. This
    # means we must assume any non-successful exit is temporary.

    ac_settings["dl_tempfail"]=(1)


elif ac_settings["dl_program"] == "aria2c":

    # aria2 commandline version.
    # Broken down:
    # call([
    #    <path to aria2c>,
    #    <ignore any user aria.conf files>
    #    <specify a destination filename, just to be safe.>
    #    <The rate limit in kb's. Zero implies no rate limit.>
    #    <The output logfile for aria2c.>
    #    <The dl_timeout value.>
    #    <--max-tries=0: aria2c treats 0 as unlimited retries>
    #    <Resume a partial file.>
    #    <The URL>
    #    ]
    #    <ac_mknice, to make aria2c have a niceness>
    #    <env variables, to set proxy>
           
    def ac_dlcmd (url, download_dir, download_fn):
        """Run aria2c to fetch *url* into download_dir/download_fn.

        Returns the aria2c exit status for the retry logic in
        ac_download().
        """
        destination = download_dir + download_fn
        options = [
            "--no-conf=true",
            "--out=" + destination,
            "--max-download-limit=" + str(ac_settings["dl_bwlimit"]) + "K",
            "--log=" + download_dir + "download.log",
            "--connect-timeout=" + str(ac_settings["dl_timeout"]),
            "--max-tries=0",
            "--continue=true",
        ]
        # Renice the downloader and hand it the configured proxy.
        return call([ac_settings["dl_exec"]] + options + [url],
                    preexec_fn = ac_mknice,
                    env = { "http_proxy" : ac_settings["dl_proxy"] })

    # Aria2c has 30 return codes. Only temporary failures are noted here.
    # 1 - If an unknown error occurred.
    # 2 - If a time out occurred.
    # 3 - If a resource was not found.
    # 4 - If aria2 saw the specified number of "resource not found" errors
    #     (see the --max-file-not-found option).
    # 6 - Network problem.
    # 8 - If the remote server did not support resume when resume was
    #     required to complete the download.
    # 19 - Name resolution failed.
    # 22 - HTTP response header was bad or unexpected.
    # 23 - Too many redirects.
    # 24 - Auth failed.
    # 29 - Server busy.

    ac_settings["dl_tempfail"]=(1,2,3,4,6,8,19,22,23,24,29)


################################################################################
# ac_download - Actually runs the ac_dlcmd. Probably an unecessary abstraction,
# but handles the result codes and retries accordingly.
################################################################################

def ac_download(url, download_dir, download_fn):
    """Download *url* into download_dir/download_fn, retrying soft failures.

    Performs one attempt via ac_dlcmd(); while the exit code is listed in
    ac_settings["dl_tempfail"] and retries remain, it sleeps
    ac_settings["dl_timeout"] seconds and tries again, up to
    ac_settings["dl_retry"] extra attempts.

    Returns the exit code of the final attempt (0 on success).
    """
    # Lazy %-style logger arguments: the message is only formatted if the
    # record is actually emitted.
    ac_log.debug("Starting dl_exec for: %s in %s", url, download_dir)

    # Set the retry count and try a download without entering the retry loop.
    triesremaining = int(ac_settings["dl_retry"])
    exitcode = ac_dlcmd(url, download_dir, download_fn)

    # If the downloader fails with a soft error code, reduce the retries,
    # log a warning, pause and try again.
    while exitcode in ac_settings["dl_tempfail"] and triesremaining > 0:
        triesremaining -= 1
        ac_log.warning("Download of %s failed! Exit code: %s Tries left: %s",
                       url, exitcode, triesremaining)
        sleep(ac_settings["dl_timeout"])
        exitcode = ac_dlcmd(url, download_dir, download_fn)

    return exitcode
