from func_timeout import FunctionTimedOut, func_timeout
from multiprocessing.pool import ThreadPool
from tqdm import tqdm
import hashlib
import httpx
import math
import os


class FastDownloader(object):
    """Resumable multi-threaded HTTP downloader.

    The remote file is fetched in ``block_size`` ranges via HTTP Range
    requests. Ranges are grouped into "parts" (8 blocks), each part is
    downloaded by a thread pool and appended to the file on disk, so an
    interrupted download resumes from the bytes already present.
    """

    # Per-request httpx timeout. None lets each request block until
    # func_timeout aborts it after ``max_time`` seconds and it is retried.
    timeout = None

    def __init__(self, auth=None, max_time=25, block_size=1024*512*1, usebar=False):
        """
        :param auth: ``(user, password)`` tuple forwarded to httpx, or None.
        :param max_time: seconds allowed per request before it is retried.
        :param block_size: bytes fetched by one Range request.
        :param usebar: show a tqdm progress bar when True.
        """
        self.auth = auth
        self.usebar = usebar
        self.block_size = block_size
        self.max_time = max_time
        # One part = 8 blocks; a whole part is written to disk as a unit.
        self.part_size = block_size * 8

    def __enter__(self):
        return self

    @staticmethod
    def write_data(start, data, path):
        """Write ``data`` (bytes, or a list of bytes chunks) into ``path``
        at byte offset ``start``, creating the file if needed."""
        mode = 'rb+' if os.path.exists(path) else 'wb'
        with open(path, mode=mode) as file:
            file.seek(start)
            if isinstance(data, list):
                for chunk in data:
                    file.write(chunk)
            else:
                file.write(data)
            file.flush()
        return

    @staticmethod
    def download_data(url, auth, start, stop, timeout, bar):
        """Fetch bytes ``start``..``stop`` of ``url``.

        When ``start == stop`` no Range header is sent and the remote
        file size (``Content-Length``) is returned instead of data.
        Propagates ``httpx.ReadTimeout`` to the caller (``run_limittime``
        retries it).
        """
        if stop == start:
            headers = None
        else:
            headers = {'Range': f'bytes={start}-{stop}'}
        with httpx.stream(
                "GET",
                url,
                auth=auth,
                headers=headers,
                timeout=timeout) as response:
            if not headers:
                # Size probe: only the headers are needed, body is not read.
                return int(response.headers['content-length'])
            # Collect chunks and join once: avoids the quadratic cost of
            # repeated bytes concatenation and handles an empty body
            # (returns b'' instead of crashing on len(None)).
            data = b''.join(response.iter_bytes(chunk_size=1024))
            if bar:
                bar.update(len(data))
            return data

    @staticmethod
    def run_limittime(args):
        """Run ``func(*args_list)`` under ``func_timeout``, retrying forever
        on timeout.

        :param args: ``(max_time, func, args_list)`` packed in one tuple so
            this method can be passed directly to ``ThreadPool.map``.
        """
        max_time, func, args_list = args
        while True:
            try:
                return func_timeout(
                    max_time,
                    func,
                    args=tuple(args_list))
            except (FunctionTimedOut, httpx.ReadTimeout):
                # Deliberate best-effort retry: stalled or slow requests
                # are simply reissued until they succeed.
                pass

    @staticmethod
    def check_file(md5, file_size, path):
        """Return True when ``path`` is a file of exactly ``file_size`` bytes
        and (if ``md5`` is given) matches the hex digest; False otherwise."""
        if not (os.path.exists(path) and os.path.isfile(path)):
            return False
        if os.path.getsize(path) != file_size:
            return False
        if not md5:
            return True
        digest = hashlib.md5()
        with open(path, 'rb') as file:
            # Stream the md5 check so huge files are not loaded into memory.
            for block in iter(lambda: file.read(1024 * 1024), b''):
                digest.update(block)
        return digest.hexdigest() == md5

    def create_file(self, header_size, url, path, bar):
        """Seed a new ``path`` with its first ``header_size`` bytes.

        Does nothing when the file already exists (resume case).
        """
        if not os.path.exists(path):
            start = 0
            stop = header_size - 1
            data = self.run_limittime((
                self.max_time,
                self.download_data,
                [url, self.auth, start, stop, self.timeout, bar]
            ))
            self.write_data(start, data, path)
        return

    def get_argslist(self, url, exist_size, file_size, bar):
        """Build the ``run_limittime`` argument tuples for the next part.

        The part spans ``exist_size``..``stop_size`` and is cut into
        ``block_size`` ranges, one tuple per worker task.

        NOTE(review): ``stop_size`` may equal ``file_size`` (one past the
        last byte index); HTTP servers clamp the Range end, so the final
        request still returns only the remaining bytes.
        """
        stop_size = min(exist_size + self.part_size - 1, file_size)
        down_size = stop_size - exist_size
        args_size = math.ceil(down_size / self.block_size)
        args_list = []
        for index in range(args_size):
            start = exist_size + index * self.block_size
            stop = min(start + self.block_size - 1, stop_size)
            args = [url, self.auth, start, stop, self.timeout, bar]
            args_list.append([self.max_time, self.download_data, args])
        return args_list

    def thread_pool(self, url, path, file_size, bar, pool_size):
        """Download the next part with a thread pool and append it to ``path``."""
        exist_size = os.stat(path).st_size
        args_list = self.get_argslist(url, exist_size, file_size, bar)
        processes = min(pool_size, len(args_list))
        pool = ThreadPool(processes=processes)
        try:
            # map() preserves order, so the chunks land sequentially on disk.
            data = pool.map(self.run_limittime, args_list)
        finally:
            # Always release the worker threads, even if a task raises.
            pool.close()
            pool.join()
        self.write_data(exist_size, data, path)
        return

    def execute(self, url, path, md5=None, header_size=1024, pool_size=5, desc='进度'):
        """Download ``url`` to ``path``, resuming and retrying until complete.

        :param md5: optional hex digest used to verify the finished file.
        :param header_size: size of the initial seed write for a new file.
        :param pool_size: maximum worker threads per part.
        :param desc: progress-bar label.
        """
        # Size probe: start == stop means "no Range header" in download_data.
        # BUG FIX: this previously read the module-global ``auth`` (NameError
        # when imported as a library); use the instance credentials.
        file_size = self.run_limittime((
            self.max_time,
            self.download_data,
            [url, self.auth, 0, 0, self.timeout, None]))

        if self.check_file(md5, file_size, path):
            return

        exist_size = os.stat(path).st_size if os.path.exists(path) else 0
        if self.usebar:
            bar = tqdm(
                total=file_size,
                initial=exist_size,
                desc=desc,
                ascii=True,
                mininterval=1,
                unit_scale=True,
                ncols=130,
                unit="B")
        else:
            bar = None

        try:
            self.create_file(header_size, url, path, bar)
            # Keep pulling parts until size (and optional md5) check passes.
            while not self.check_file(md5, file_size, path):
                self.thread_pool(url, path, file_size, bar, pool_size)
        finally:
            if bar:
                bar.close()
        return

    def __exit__(self, _type, _value, _trace):
        return


if __name__ == "__main__":
    # Example: resumable download of a Sentinel-2 product from PEPS.
    path = r"D:\c盘文件勿删！\桌面\fsdownload\新建文件夹\S2A_MSIL1C_20230308T030551_N0509_R075_T49RGJ_20230308T054546.zip"  # .incomplete
    url = "https://peps.cnes.fr/resto/collections/S2ST/7b276d88-205e-5f3e-9496-1db5ab87f828/download/?issuerId=peps"
    # SECURITY: credentials were hard-coded in source. Read them from the
    # environment, keeping the old values only as a fallback so existing
    # behavior is unchanged. Rotate these credentials and drop the fallback.
    auth = (
        os.environ.get('PEPS_USER', 'fengzhipo@outlook.com'),
        os.environ.get('PEPS_PASS', '3edc@WSX'),
    )
    desc = 'S2A_MSIL1C_20230308T030551_N0509_R075_T49RGJ_20230308T054546'
    with FastDownloader(auth, usebar=True, max_time=25, block_size=1024*1024) as FD:
        FD.execute(url, path, header_size=1024*1024, pool_size=6, desc=desc)
    
    
    
 