#!/usr/bin/env python
# -*- coding: utf-8 -*-
# FROM: http://fayaa.com/code/view/58/full/
# Jay modified it slightly and saved it for potential future use.


'''A multi-threaded downloading tool.

    Developed following the design of axel.
        Author: volans
        E-mail: volansw [at] gmail.com
'''

import sys
import os
import time
import urllib
import requests
from threading import Thread
import argparse
import urlparse


class AxelPython(Thread, urllib.FancyURLopener):
    '''Multi-thread downloading worker.

    Each instance downloads one byte range of the target URL into its own
    temporary file.  Inherits Thread (run() is the thread entry point) and
    urllib.FancyURLopener (supplies open()/addheader() for the HTTP request).
    Python 2 only: urllib.FancyURLopener does not exist in Python 3.
    '''

    def __init__(self, threadname, url, filename, ranges=0, proxies={}):
        # NOTE(review): mutable default {} for proxies is shared across
        # instances; harmless here since it is only passed through, but fragile.
        Thread.__init__(self, name=threadname)
        urllib.FancyURLopener.__init__(self, proxies)
        self.name = threadname      # worker name (also the Thread name)
        self.url = url              # source URL to fetch from
        self.filename = filename    # temp file this worker appends to
        self.ranges = ranges        # (start, end) byte offsets, inclusive
        self.downloaded = 0         # bytes written so far (polled by paxel)

    def run(self):
        '''Thread entry point: fetch this worker's byte range, resuming if
        the temp file already contains a partial download.'''
        try:
            # Resume support: bytes already in the temp file count as done.
            self.downloaded = os.path.getsize(self.filename)
        except OSError:
            # Temp file does not exist yet -- nothing downloaded so far.
            self.downloaded = 0

        # Rebuild the start point, skipping bytes fetched on a previous run.
        self.startpoint = self.ranges[0] + self.downloaded

        # This part is already complete; nothing left to fetch.
        if self.startpoint >= self.ranges[1]:
            print 'Part %s has been downloaded over.' % self.filename
            return

        self.oneTimeSize = 16384  # read chunk size: 16 KiB per iteration
        # print 'task %s will download from %d to %d' % (self.name, self.startpoint, self.ranges[1])

        # Ask the server for only the remaining slice of this range
        # (requires the server to honour HTTP Range requests).
        self.addheader("Range", "bytes=%d-%d" % (self.startpoint, self.ranges[1]))
        self.urlhandle = self.open(self.url)

        data = self.urlhandle.read(self.oneTimeSize)
        while data:
            # Reopen in append mode for every chunk: inefficient, but each
            # chunk is durably on disk, which keeps resume-by-size correct.
            filehandle = open(self.filename, 'ab+')
            filehandle.write(data)
            filehandle.close()

            self.downloaded += len(data)
            #print "%s" % (self.name)
            #progress = u'\r...'

            data = self.urlhandle.read(self.oneTimeSize)


def GetUrlFileSize(url, proxies={}):
    '''Return the size in bytes of the resource at *url*.

    Opens the URL with urllib.urlopen (Python 2) and scans the raw response
    header lines for Content-Length.

    :param url: URL to probe.
    :param proxies: proxy mapping passed straight to urllib.urlopen.
    :return: content length as int, or 0 when the server sends none.
    '''
    urlHandler = urllib.urlopen(url, proxies=proxies)
    headers = urlHandler.info().headers
    length = 0
    for header in headers:
        # Match only the real Content-Length header, case-insensitively.
        # The previous substring test matched ANY header containing
        # "Length" (e.g. X-Custom-Length) and missed "content-length".
        if header.lower().startswith('content-length'):
            length = int(header.split(':')[-1].strip())
    return length


def SpliteBlocks(totalsize, blocknumber):
    '''Split *totalsize* bytes into *blocknumber* contiguous ranges.

    Each range is an inclusive (start, end) byte-offset pair; the last
    block absorbs the remainder so the ranges exactly cover
    [0, totalsize - 1].

    :param totalsize: total number of bytes to split.
    :param blocknumber: how many blocks to produce (must be >= 1).
    :return: list of (start, end) tuples, inclusive at both ends.
    '''
    # Explicit floor division: plain "/" would yield floats under Python 3
    # (or "from __future__ import division") and corrupt the byte offsets.
    blocksize = totalsize // blocknumber
    ranges = [(i * blocksize, (i + 1) * blocksize - 1)
              for i in range(blocknumber - 1)]
    # Last block takes whatever is left, including the division remainder.
    ranges.append((blocksize * (blocknumber - 1), totalsize - 1))
    return ranges


def unit_transfer(_size, div=1):
    '''Format a byte count (optionally divided by *div*) as a human string.

    :param _size: size in bytes, or a byte delta when *div* is an elapsed
                  time in seconds (the result is then a data rate).
    :param div: divisor, e.g. elapsed seconds; defaults to 1.
    :return: string in Bytes, Kb or Mb with two decimal places.
    '''
    # Force true division so two int arguments do not floor-divide on
    # Python 2 while float arguments divide exactly.
    size = _size / float(div)
    if size < 1024:
        return "%.2fBytes" % size
    elif size < 1024 * 1024:
        return "%.2fKb" % (size * 1.0 / 1024)
    else:
        return "%.2fMb" % (size * 1.0 / 1024 / 1024)


def format_time(s):
    '''Format a duration in seconds as "Ss", "MminSs" or "HhMminSs".

    :param s: duration in seconds (int or float, non-negative).
    :return: human-readable string; hours/minutes are whole units, the
             seconds component is rounded to the nearest integer.
    '''
    # Floor hours and minutes BEFORE formatting.  The original fed the
    # fractional values to %.0f, which rounded them, so e.g. 90 seconds
    # was rendered as "2min30s" instead of "1min30s".
    h_s = int(s // 3600)
    min_s = int((s - h_s * 3600) // 60)
    sec_s = s - h_s * 3600 - min_s * 60
    if s < 60:
        return '%.0fs' % sec_s
    elif s < 60 * 60:
        return '%.0fmin%.0fs' % (min_s, sec_s)
    else:
        return '%.0fh%.0fmin%.0fs' % (h_s, min_s, sec_s)


def flush_content(content):
    """Overwrite the current terminal line with *content*.

    Writes a carriage return plus the text, flushes stdout, then pauses
    2 seconds so successive progress updates remain readable.

    :param content: text to display on the current line.
    :return: None
    """
    # Dropped the unused local "n = len(content)" from the original.
    sys.stdout.write('\r' + content)
    sys.stdout.flush()
    time.sleep(2)

def islive(tasks):
    """Return True while at least one downloader thread is still running.

    :param tasks: iterable of Thread-like objects exposing isAlive().
    :return: True if any task reports alive, otherwise False.
    """
    return any(task.isAlive() for task in tasks)


def paxel(url, output, blocks=6, proxies={}):
    '''Download *url* into *output* using *blocks* parallel range requests.

    Probes the remote size, splits it into byte ranges, runs one
    AxelPython worker thread per range (each appending to tmpfile_<i>),
    prints a self-refreshing progress line until every worker finishes,
    then concatenates the temp files into *output* and removes them.

    :param url: URL to download; the server must honour Range requests.
    :param output: path of the final assembled file.
    :param blocks: number of parallel download threads.
    :param proxies: proxy mapping used for the size probe and the workers.
    :return: None
    '''
    size = GetUrlFileSize(url, proxies)
    ranges = SpliteBlocks(size, blocks)

    threadname = ["thread_%d" % i for i in range(0, blocks)]
    filename = ["tmpfile_%d" % i for i in range(0, blocks)]

    tasks = []
    for i in range(0, blocks):
        # Forward proxies so the workers use the same proxy as the size
        # probe (previously they always connected directly).
        task = AxelPython(threadname[i], url, filename[i], ranges[i], proxies)
        task.setDaemon(True)
        task.start()
        tasks.append(task)

    time.sleep(2)
    old_downloaded = 0  # bytes seen at the previous tick (was seeded to 1.0)
    f_size = unit_transfer(size)
    start = time.time()
    old_time = start
    old_show = ''
    while islive(tasks):
        time.sleep(2)
        downloaded = sum([task.downloaded for task in tasks])
        process = downloaded / float(size) * 100
        f_downloaded = unit_transfer(downloaded)
        have_downloaded = downloaded - old_downloaded
        elapsed = time.time() - old_time

        # Nothing new arrived (or the clock did not advance): skip this
        # tick to avoid a zero division in the rate/ETA computation.
        if elapsed == 0 or have_downloaded == 0:
            continue

        dr = unit_transfer(have_downloaded, elapsed)  # current data rate
        old_time = time.time()
        old_downloaded = downloaded
        estimate_time = (size - downloaded) * elapsed / have_downloaded
        f_time = format_time(estimate_time)

        show_format = '\rTotal:%s DONE:%s PERCENT:%.2f%% DR:%s LT:%s'
        show = show_format % (f_size, f_downloaded, process, dr, f_time)
        # Pad with spaces so a shorter line fully overwrites the previous one.
        n = max(len(show), len(old_show))
        sys.stdout.write(show.ljust(n))
        sys.stdout.flush()
        time.sleep(1)
        old_show = show

    # Stitch the per-thread temp files together in range order.
    filehandle = open(output, 'wb+')
    for i in filename:
        f = open(i, 'rb')
        filehandle.write(f.read())
        f.close()
        try:
            os.remove(i)
        except OSError:
            # Best-effort cleanup; a leftover temp file is not fatal.
            # (Was a bare "except:" that also swallowed KeyboardInterrupt.)
            pass
    filehandle.close()
    print('\rFinished...\r')


def add_parameters():
    """Build and parse the command-line options.

    Options:
        -u/--url      (required) URL to download.
        -p/--proxy    any value enables the local proxy; off by default.
        -n/--nthreads number of download threads (default 10).

    :return: argparse.Namespace with ``url``, ``proxy`` and ``threads``.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("-u", "--url",
                        dest="url",
                        required=True,
                        help="download url")
    parser.add_argument("-p", "--proxy",
                        default=False,
                        dest="proxy",
                        # Help text fixed: the default really is False (off).
                        help="pass any value to enable the local http proxy; default False")
    parser.add_argument("-n", "--nthreads",
                        default=10,
                        dest="threads",
                        type=int,
                        help="how many threads to use for downloading.")
    return parser.parse_args()


if __name__ == '__main__':

    # Local proxy endpoints used when --proxy is given.
    local_proxies = {
        'http': 'http://127.0.0.1:1080',
        'https': 'http://127.0.0.1:1080',
    }

    args = add_parameters()
    if args.proxy:
        proxy = local_proxies
    else:
        proxy = dict()
    nthreads = args.threads
    url = args.url

    # Derive the output filename: prefer the server-supplied
    # Content-Disposition filename, fall back to the URL basename.
    output = 'tmp.file'
    try:
        rh = requests.get(url, stream=True)
        try:
            dp = rh.headers['Content-Disposition']
            # e.g. 'attachment; filename="name.ext"' -> name.ext
            # Strip surrounding quotes so we don't create a file literally
            # named "name.ext" (quotes included), as the original did.
            output = (dp.split(';')[1]).split('=')[1].strip().strip('"\'')
        finally:
            # Close the streamed response; the original leaked the
            # connection because the body was never read or closed.
            rh.close()
    except Exception:
        # Missing/malformed header or network error: use the URL name.
        output = os.path.basename(url)

    paxel(url, output, blocks=nthreads, proxies=proxy)