
import os
import sys
import shutil
import hashlib
import requests

# Module-level counter used to generate unique temp-directory names,
# one per download attempt within this run.
g_index = 0

# cal md5 for url
def md5(url):
    """Return the hex MD5 digest of *url*.

    The digest is used elsewhere as the name of the directory a URL
    is downloaded into.
    """
    return hashlib.md5(url.encode()).hexdigest()

# is need to download
def is_need_download(url, to_dir):
    """Return True when *url* still needs to be fetched into *to_dir*.

    A URL counts as already downloaded once a directory named after its
    MD5 digest exists under *to_dir*.
    """
    target = to_dir + '/' + md5(url)
    return not os.path.exists(target)

# download url to tmp dir
# download url to tmp dir
def download_tmp(url, to_dir):
    """Fetch *url* and write its body into *to_dir* under the URL's basename.

    Returns 0 on success, -1 on any network or filesystem error
    (this error-code contract is relied on by download()).
    """
    try:
        # Bounded timeout so a dead server cannot hang the whole run.
        r = requests.get(url, timeout=60)
        # Treat HTTP errors (404/500/...) as failures instead of
        # silently saving the error page as the package payload.
        r.raise_for_status()

        name = os.path.split(url)[-1]
        with open(os.path.join(to_dir, name), "wb") as f:
            f.write(r.content)
        return 0
    # Narrow except: a bare `except:` also swallowed KeyboardInterrupt
    # and SystemExit, making the script impossible to interrupt cleanly.
    except (requests.RequestException, OSError) as e:
        print("download error:", e)
        return -1

def download(url, to_dir):
    """Download *url* into *to_dir*/<md5(url)>/, atomically.

    The file is first fetched into a numbered temp directory; only on
    success is that directory renamed to its final MD5-named path, so a
    half-finished download never looks complete to is_need_download().

    Returns 0 on success (or when already downloaded), -1 on failure.
    """
    global g_index
    if not is_need_download(url, to_dir):
        return 0

    md5_s = md5(url)
    tmp_dir = to_dir + '/' + str(g_index)
    real_dir = to_dir + '/' + md5_s
    g_index = g_index + 1

    # Start from a clean temp directory -- a leftover from a previous,
    # interrupted run may still exist under the same index.
    if os.path.exists(tmp_dir):
        shutil.rmtree(tmp_dir)
    os.mkdir(tmp_dir)

    if download_tmp(url, tmp_dir) == 0:
        # Success: publish the result under its final MD5-based name.
        os.rename(tmp_dir, real_dir)
        return 0

    # Failure: remove the temp directory instead of leaking a partial
    # download onto disk (the original left it behind).
    shutil.rmtree(tmp_dir, ignore_errors=True)
    return -1

# load url list
# load url list
def load_package_list(f):
    """Read the URL lines from package-list file *f*.

    Lines beginning with 'Package:' or 'url:' are metadata and skipped,
    as are blank lines (previously blank lines were returned and became
    empty-string "URLs" downstream).  Returned lines keep their trailing
    newline, matching the original contract -- callers strip it.
    """
    urls = []
    with open(f, 'r') as fp:
        for line in fp:
            if line.startswith(('Package:', 'url:')):
                continue
            if not line.strip():
                continue
            urls.append(line)
    return urls


def main():
    """CLI entry point: download every URL listed in the package file.

    Usage: python download.py packagelist.txt target_dir
    """
    if len(sys.argv) != 3:
        print("python download.py packagelist.txt target_dir")
        sys.exit(-1)

    url_list = load_package_list(sys.argv[1])
    to_dir = sys.argv[2]

    for url in url_list:
        # Strip the line endings left over from the list file.
        url = url.replace('\n', '')
        url = url.replace('\r', '')
        print("downloading ..", url)
        if download(url, to_dir) == 0:
            print("download success!")
        else:
            print("download fail!")


# Guard the entry point so importing this module (e.g. to reuse
# download()) does not trigger the CLI side effects.
if __name__ == "__main__":
    main()