# // package http

# // import (
# // 	"context"
# // 	"fmt"
# // 	"io"
# // 	"net"
# // 	"net/http"
# // 	"net/url"
# // 	"os"
# // 	"path/filepath"
# // 	"strings"
# // 	"syscall"
# // 	"time"

# // 	"github.com/aptly-dev/aptly/aptly"
# // 	"github.com/aptly-dev/aptly/utils"
# // 	"github.com/mxk/go-flowrate/flowrate"
# // 	"github.com/pkg/errors"
# // 	"github.com/smira/go-ftp-protocol/protocol"
# // )

# // Check interface
# // var (
# // 	_ aptly.Downloader = (*downloaderImpl)(  None )
# // )

# // downloaderImpl is implementation of Downloader interface
import hashlib
import os
import time

import requests

from utils.checksum import *
class downloaderImpl:
	"""Implementation of the aptly Downloader interface (ported from Go).

	Downloads files over HTTP(S) with retries, optional progress reporting
	and optional checksum verification.  Error handling follows the Go
	convention kept by this port: methods return None on success and an
	error value (a string) on failure.
	"""

	# progress reporter exposing a Go-style Printf(fmt, *args); may be None
	progress = None
	# aggregate writer used for rate limiting in the Go original; unused here
	aggWriter = None
	# number of attempts before giving up on a download
	maxTries: int = 0
	# HTTP client; defaults to the requests module itself (supports .get())
	client = requests

	# // Download starts new download task
	def Download(self, ctx, url: str, destination: str):
		"""Download url to destination without checksum verification.

		Returns None on success or an error value on failure.
		"""
		return self.DownloadWithChecksum(ctx, url, destination, None, False)

	def newRequest(self, ctx, method, url: str):
		"""Build a new (not yet sent) HTTP request.

		Returns a (request, error) pair, mirroring the original Go API.
		NOTE(review): the Go version rewrote '+' in the request path to
		'%2b' when no proxy was configured; requests performs its own URL
		handling, so that transport-level workaround is not reproduced.
		"""
		try:
			req = requests.Request(method=method, url=url)
		except Exception as e:
			return None, "%s: %s" % (url, e)
		return req, None

	# // DownloadWithChecksum starts new download task with checksum verification
	def DownloadWithChecksum(self, ctx, url: str, destination: str,
		expected: 'ChecksumInfo', ignoreMismatch: bool):
		"""Download url to destination, retrying with exponential backoff.

		expected, when not None, carries the expected size/hashes
		(ChecksumInfo); ignoreMismatch demotes a checksum mismatch from a
		failure to a warning.  Returns None on success or an error value.
		"""
		if self.progress is not None:
			self.progress.Printf("Downloading %s...\n", url)

		# BUG FIX: the original called newRequest("GET", url) without ctx,
		# which would raise TypeError against newRequest's own signature.
		req, err = self.newRequest(ctx, "GET", url)
		if err is not None:
			return err

		temppath = ""
		err = None
		maxTries = self.maxTries
		delayMax = 5 * 60  # cap the backoff at five minutes
		delay = 1
		delayMultiplier = 2
		while maxTries > 0:
			temppath, err = self.download(req, url, destination, expected, ignoreMismatch)
			if err is None:
				# get out of the loop
				if self.progress is not None:
					self.progress.Printf("Success downloading %s\n", url)
				break
			maxTries -= 1
			if maxTries > 0:
				if self.progress is not None:
					self.progress.Printf("Error downloading %s: %s retrying...\n", url, err)
				time.sleep(delay)
				# sleep exponentially longer at the next retry, but no
				# longer than delayMax
				delay = min(delay * delayMultiplier, delayMax)

		# still an error after retrying: give up
		if err is not None:
			if self.progress is not None:
				self.progress.Printf("Giving up on %s...\n", url)
			return err

		try:
			# move the fully-downloaded temp file into its final place
			os.rename(temppath, destination)
		except OSError as e:
			try:
				os.remove(temppath)
			except OSError:
				pass
			return "%s: %s" % (url, e)

		return None

	def download(self, req, url, destination: str, expected: 'ChecksumInfo', ignoreMismatch: bool):
		"""Perform a single download attempt into destination + '.down'.

		Returns (temppath, None) on success or ("", error) on failure.
		"""
		try:
			resp = self.client.get(url, stream=True)
		except Exception as e:
			return "", "%s: %s" % (url, e)

		try:
			if resp.status_code < 200 or resp.status_code > 299:
				return "", "%s: HTTP code %d" % (url, resp.status_code)

			# make sure the target directory exists
			destdir = os.path.dirname(destination)
			if destdir:
				os.makedirs(destdir, exist_ok=True)

			temppath = destination + ".down"
			# hash everything while streaming so no second pass is needed
			hashers = {
				"MD5": hashlib.md5(),
				"SHA1": hashlib.sha1(),
				"SHA256": hashlib.sha256(),
				"SHA512": hashlib.sha512(),
			}
			size = 0
			try:
				with open(temppath, "wb") as outfile:
					for chunk in resp.iter_content(chunk_size=64 * 1024):
						if not chunk:
							continue
						outfile.write(chunk)
						size += len(chunk)
						if expected is not None:
							for h in hashers.values():
								h.update(chunk)
			except Exception as e:
				try:
					os.remove(temppath)
				except OSError:
					pass
				return "", "%s: %s" % (url, e)

			if expected is not None:
				err = self._verifyChecksums(url, size, hashers, expected)
				if err is not None:
					if ignoreMismatch:
						if self.progress is not None:
							self.progress.Printf("WARNING: %s\n", err)
					else:
						os.remove(temppath)
						return "", err

			return temppath, None
		finally:
			# release the connection back to the pool
			resp.close()

	def _verifyChecksums(self, url, size, hashers, expected):
		"""Compare actual size and hashes against the expected ChecksumInfo.

		Empty/absent expected hashes are skipped, matching the Go original.
		Returns an error string on the first mismatch, else None.
		NOTE(review): assumes ChecksumInfo exposes Size/MD5/SHA1/SHA256/SHA512
		attributes as in the Go source — confirm against utils.checksum.
		"""
		expSize = getattr(expected, "Size", None)
		if expSize is not None and size != expSize:
			return "%s: size check mismatch %d != %d" % (url, size, expSize)
		for name in ("MD5", "SHA1", "SHA256", "SHA512"):
			want = getattr(expected, name, "")
			if want and hashers[name].hexdigest() != want:
				return "%s: %s hash mismatch %r != %r" % (
					url, name.lower(), hashers[name].hexdigest(), want)
		return None

# def retryableError(err error) bool {
# 	// unwrap errors.Wrap
# 	err = errors.Cause(err)

# 	// unwrap *url.Error
# 	if wrapped, ok := err.(*url.Error); ok {
# 		err = wrapped.Err
# 	}

# 	switch err {
# 	case io.EOF:
# 		return true
# 	case io.ErrUnexpectedEOF:
# 		return true
# 	}

# 	switch err.(type) {
# 	case *net.OpError:
# 		return true
# 	case syscall.Errno:
# 		return true
# 	case net.Error:
# 		return true
# 	}
# 	// Note: make all errors retryable
# 	return true
# }

# // NewDownloader creates new instance of Downloader which specified number
# // of threads and download limit in bytes/sec
# def NewDownloader(downLimit int64, maxTries int, progress aptly.Progress) aptly.Downloader {
# 	transport := http.Transport{}
# 	transport.Proxy = http.DefaultTransport.(*http.Transport).Proxy
# 	transport.ResponseHeaderTimeout = 30 * time.Second
# 	transport.TLSHandshakeTimeout = http.DefaultTransport.(*http.Transport).TLSHandshakeTimeout
# 	transport.ExpectContinueTimeout = http.DefaultTransport.(*http.Transport).ExpectContinueTimeout
# 	transport.DisableCompression = true
# 	initTransport(&transport)
# 	transport.RegisterProtocol("ftp", &protocol.FTPRoundTripper{})

# 	downloader := &downloaderImpl{
# 		progress: progress,
# 		maxTries: maxTries,
# 		client: &http.Client{
# 			Transport: &transport,
# 		},
# 	}

# 	progressWriter := io.Writer(progress)
# 	if progress is  None  {
# 		progressWriter = io.Discard
# 	}

# 	downloader.client.CheckRedirect = downloader.checkRedirect
# 	if downLimit > 0 {
# 		downloader.aggWriter = flowrate.NewWriter(progressWriter, downLimit)
# 	} else {
# 		downloader.aggWriter = progressWriter
# 	}

# 	return downloader
# }

# def (downloader *downloaderImpl) checkRedirect(req *http.Request, via []*http.Request) error {
# 	if downloader.progress is not None  {
# 		downloader.progress.Printf("Following redirect to %s...\n", req.URL)
# 	}

# 	return   None 
# }

# // GetProgress returns Progress object
# def (downloader *downloaderImpl) GetProgress() aptly.Progress {
# 	return downloader.progress
# }

# // GetLength of given url
# def (downloader *downloaderImpl) GetLength(ctx context.Context, url string) (int64, error) {
# 	req, err := downloader.newRequest(ctx, "HEAD", url)
# 	if err is not None  {
# 		return -1, err
# 	}

# 	var resp *http.Response

# 	maxTries := downloader.maxTries
# 	for maxTries > 0 {
# 		resp, err = downloader.client.Do(req)
# 		if err is not None  && retryableError(err) {
# 			maxTries--
# 		} else {
# 			// stop retrying
# 			break
# 		}
# 	}

# 	if err is not None  {
# 		return -1, errors.Wrap(err, url)
# 	}

# 	if resp.StatusCode < 200 || resp.StatusCode > 299 {
# 		return -1, &Error{Code: resp.StatusCode, URL: url}
# 	}

# 	if resp.ContentLength < 0 {
# 		// an existing, but zero-length file can be reported with ContentLength -1
# 		if resp.StatusCode == 200 && resp.ContentLength == -1 {
# 			return 0,   None 
# 		}
# 		return -1, fmt.Errorf("could not determine length of %s", url)
# 	}

# 	return resp.ContentLength,   None 
# }
