import concurrent.futures
import os
import sys
import time
import urllib.error, requests
from lxml import html

from globally import LocalDataRootPath
from urllib import request
from concurrent.futures import ThreadPoolExecutor


class DownloadPack:
    """Download a list of packages from a remote repository URL into the
    local mirror directory derived from that URL.

    Parameters
    ----------
    url : str
        Base URL of the repository (with or without a trailing slash);
        '/Packages' is appended to form the package directory URL.
    packlist : list[str]
        File names of the packages to download.
    """

    def __init__(self, url, packlist):
        # Normalize so self.url always ends with exactly one '/Packages'.
        if url.endswith("/"):
            self.url = url + 'Packages'
        else:
            self.url = url + '/Packages'
        self.packlist = packlist

    def right_sha256(self):
        """Locate the repository's filelists.xml entry under repodata/.

        Returns the matching link text. Exits the process when the
        repodata listing cannot be fetched or no filelists.xml link exists.
        """
        def get_file():
            # self.url ends with '/Packages' (8 chars); strip it to reach
            # the repo root, then fetch the repodata directory listing.
            response = requests.get(self.url[:-8] + 'repodata')
            if response.status_code != 200:
                print("外网源访问错误")
                sys.exit(1)
            tree = html.fromstring(response.text)
            links = tree.xpath('//a/@href')
            # BUG FIX: the original had the sys.exit in an `else` inside
            # this loop, so it aborted on the FIRST link that did not match
            # instead of scanning all links before giving up.
            for name in links:
                if "filelists.xml" in name:
                    return name
            print("未找到标准sha256，包对比基准")
            sys.exit(1)

        # BUG FIX: the original bound the result to a local and dropped it;
        # return it so callers can actually use the file name.
        return get_file()

    def download(self):
        """Download every package in self.packlist concurrently (30 threads).

        Files are written under the local directory derived from self.url;
        failed downloads are retried a bounded number of times.
        """
        dirpath = self.__urltodir
        print(dirpath)
        totalcount = len(self.packlist)
        print("总计需要下载： {}个包".format(totalcount))

        # install_opener mutates process-global urllib state; do it once up
        # front instead of re-installing it from 30 worker threads per task
        # as the original did.
        opener = urllib.request.build_opener()
        opener.addheaders = [('User-Agent', 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1941.0 Safari/537.36')]
        urllib.request.install_opener(opener)

        def task(packname, count):
            # URL-encode '+', which servers otherwise decode as a space.
            real_packname = packname.replace("+", "%2B")
            downloadurl = self.url + "/" + real_packname
            destpath = dirpath + '/' + packname
            print("获取{}/{}: {}".format(count, totalcount, downloadurl))
            # BUG FIX: the original `while True` retried forever on any
            # persistent error (e.g. 404), its bare `except:` swallowed
            # KeyboardInterrupt/SystemExit, and `os.remove` raised when the
            # file was never created. Retry a bounded number of times and
            # only remove a partial file that actually exists.
            for _attempt in range(5):
                try:
                    urllib.request.urlretrieve(downloadurl, destpath)
                    time.sleep(0.5)  # throttle: be gentle with the mirror
                    return
                except urllib.error.ContentTooShortError:
                    continue  # truncated transfer — retry
                except Exception:
                    if os.path.exists(destpath):
                        os.remove(destpath)
            print("下载失败: {}".format(downloadurl))

        # Fan the downloads out over a thread pool and wait for completion.
        futures = []
        with ThreadPoolExecutor(30) as pool:
            for count, packname in enumerate(self.packlist, start=1):
                futures.append(pool.submit(task, packname, count))
            concurrent.futures.wait(futures)

    # Map the source URL to its storage directory under LocalDataRootPath,
    # creating the directory on first access.
    @property
    def __urltodir(self):
        # Drop scheme and host ('http:', '', 'host'); keep the path parts.
        dirlist = self.url.split('/')[3:]
        dirpath = LocalDataRootPath + '/'.join(dirlist)
        if not os.path.exists(dirpath):
            os.makedirs(dirpath)
        return dirpath