# -*- coding: utf-8 -*-
"""
    :copyright: (c) 2019 by enjoyZHou on 2019-06-28
"""
import sys
import os
import requests
from contextlib import closing

# Python 2 leftover — unnecessary and unavailable on Python 3:
# reload(sys)
# sys.setdefaultencoding('utf8')


def download(urls, save_dir):
    """
    Download one or more files over HTTP, printing a simple progress line.

    :param urls: either a list of URLs (each item may itself be a list of
                 string fragments that are joined into one URL), or a dict
                 mapping a sub-directory name to such a list; any other
                 type is silently ignored.
    :param save_dir: base directory where downloaded files are stored;
                     created if it does not exist. Dict keys become
                     sub-directories of ``save_dir``.
    :return: None
    """
    headers = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) '
        + 'Chrome/63.0.3239.132 Safari/537.36'
    }

    def _down_loads(urls_li, file_path):
        # Create the target directory (exist_ok avoids a check-then-create race).
        os.makedirs(file_path, exist_ok=True)
        for _url in urls_li:
            # A URL may arrive as a list of string fragments; join them first.
            if isinstance(_url, list):
                _url = ''.join(_url)
            # Local file name is the last path component of the URL.
            _, full_filename = os.path.split(_url)
            _file_path = os.path.join(file_path, full_filename)
            # Size of a previously downloaded local copy (0 if none).
            ld_f_size = os.path.getsize(_file_path) if os.path.isfile(_file_path) else 0
            try:
                # timeout prevents a stalled server from hanging the whole batch.
                with closing(requests.get(_url, headers=headers, stream=True, timeout=30)) as response:
                    if response.status_code != 200:
                        continue
                    chunk_size = 1024000  # 单次请求最大值 (max bytes per chunk)
                    # 'content-length' may be absent (e.g. chunked transfer);
                    # 0 then means "total size unknown".
                    content_size = int(response.headers.get('content-length', 0))
                    # Skip files that are already fully downloaded.
                    if content_size and ld_f_size == content_size:
                        continue
                    data_count = 0
                    with open(_file_path, "wb") as _file:
                        for data in response.iter_content(chunk_size=chunk_size):
                            _file.write(data)
                            data_count += len(data)
                            # Progress percentage only makes sense when the
                            # total size is known (avoids division by zero).
                            if content_size:
                                now_jd = (float(data_count) / content_size) * 100
                                sys.stdout.write('\r 文件下载进度：%d%%(%d/%d) - %s' % (now_jd, data_count, content_size, _file_path))
                                sys.stdout.flush()
                    print('\n')
            except requests.RequestException as exc:
                # Best effort: one failed URL must not abort the whole batch.
                sys.stderr.write('download failed: %s (%s)\n' % (_url, exc))
                continue

    # Create the base directory if needed.
    os.makedirs(save_dir, exist_ok=True)
    if isinstance(urls, list):
        _down_loads(urls, save_dir)
    elif isinstance(urls, dict):
        # Each dict key becomes a sub-directory under save_dir.
        for k, v in urls.items():
            _dir = os.path.join(save_dir, str(k))
            _down_loads(v, _dir)
    else:
        # Unsupported input type: silently ignore (original behavior).
        pass
