# from tqdm.asyncio import tqdm
from tqdm import tqdm
import threading
import fasteners
import requests
import aiofiles
import asyncio
import aiohttp
import logging
import queue
import fcntl
import time
import os


class Download:
    """Concurrent product downloader for the Copernicus Data Space (CDSE).

    One daemon thread keeps the OAuth access token fresh, one daemon
    thread (``inspector``) sorts products into a shared priority queue,
    and a small pool of asyncio workers streams the files to disk,
    guarded by an inter-process lock so multiple processes cooperate.
    """
    # Shared across instances; lower priority value = newer product first.
    download_queue = queue.PriorityQueue(maxsize=10000)
    # Set by inspector() once the product queue is fully drained.
    # NOTE(review): set from a plain thread while asyncio workers merely
    # poll is_set(); workers never await it, so this read-only use is safe.
    stop_event = asyncio.Event()
    concurrent_downloads = 2
    # Caps simultaneous HTTP transfers across all worker tasks.
    semaphore = asyncio.Semaphore(concurrent_downloads)
    headers = {'Content-Type': 'application/x-www-form-urlencoded'}
    # NOTE(review): the TCPConnector below is created at class-definition
    # time, outside any event loop, so these kwargs are not safely
    # reusable inside downloader() — kept only for reference.
    session_kwargs = {
        'timeout': aiohttp.ClientTimeout(total=600),
        'connector': aiohttp.TCPConnector(limit=4)}
    tqdm_kwargs = {
        "disable": False,
        "ncols": 150,
        "nrows": 100,
        "smoothing": True,  # tqdm expects a float in [0, 1]; True behaves as 1
        "unit": "B",
        "unit_scale": True,
        "unit_divisor": 1024}

    def __init__(self, account, agent, save_dir, attmpt, logger):
        """Start the token-refresh thread and wait for the first token.

        :param account: dict with 'username' / 'password' CDSE credentials
        :param agent: dict with proxy 'ip' / 'port'
        :param save_dir: root directory for downloaded products
        :param attmpt: retry budget per product
        :param logger: logger whose file handler doubles as the download record
        """
        self.save_dir = save_dir
        self.proxy = f"http://{agent['ip']}:{agent['port']}"
        self.proxies = {
            'http': f"{agent['ip']}:{agent['port']}",
            'https': f"{agent['ip']}:{agent['port']}"}
        self.attmpt = attmpt
        self.refresh_data = {
            'grant_type': 'password',
            'username': account['username'],
            'password': account['password'],
            'client_id': 'cdse-public'}
        self.logger = logger
        threading.Thread(target=self.refresher, name="刷新token线程", daemon=True).start()
        # Block until the refresher thread has stored the first access token
        # (under its name-mangled attribute) before letting callers proceed.
        private_attribute = f'_{self.__class__.__name__}__access_token'
        while not hasattr(self, private_attribute):
            time.sleep(1)

    @property
    def exist_products(self):
        """Return the set of product names already logged as downloaded.

        Scans the logger's file-handler log for " - INFO - success: <name>"
        lines written by previous runs.
        """
        exist_names = set()
        # BUG FIX: the original tested isinstance(handler, logging.Handler)
        # — true for *every* handler — and then read the nonexistent
        # `.filename` attribute (FileHandler exposes `.baseFilename`),
        # leaving `log_path` unbound when no handler matched.
        log_path = None
        for handler in self.logger.handlers:
            path = getattr(handler, 'baseFilename', None) or getattr(handler, 'filename', None)
            if path:
                log_path = path
        if log_path and os.path.exists(log_path):  # record file exists
            with open(log_path, 'r') as file:
                for line in file:
                    if " - INFO - success: " in line:  # name marked as success
                        name = line.split(" - INFO - success: ")[1].replace('\n', '')
                        exist_names.add(name)
        return exist_names

    def refresher(self):
        """Daemon loop: keep `self.__access_token` valid indefinitely.

        Uses password credentials first, then the returned refresh token;
        falls back to password credentials on any rejection or network error.
        """
        data = self.refresh_data
        while True:  # runs for the lifetime of the process
            try:  # try to obtain an access token and a refresh token
                with requests.post(
                    "https://identity.dataspace.copernicus.eu/auth/realms/CDSE/protocol/openid-connect/token",
                    headers={'Content-Type': 'application/x-www-form-urlencoded'},
                    data=data,
                    proxies=self.proxies,
                    timeout=60) as response:
                    result = response.json()
                    if response.status_code == 200:  # accepted: store tokens
                        self.__access_token = result['access_token']
                        # Refresh just before the token expires.
                        interval = result['expires_in'] - 1
                        data = {
                                'grant_type': 'refresh_token',
                                'refresh_token': result['refresh_token'],
                                'client_id': 'cdse-public'}
                    else:  # rejected: retry soon with the account credentials
                        interval = 1
                        data = self.refresh_data
            except requests.RequestException:  # network error: retry with credentials
                interval = 1
                data = self.refresh_data
            time.sleep(interval)
        return

    def inspector(self, product_queue, transfer_queue):
        """Thread loop: route each product to download or transfer.

        Reads (name, date, productid) tuples until a ``None`` sentinel,
        then sets ``stop_event`` so workers can drain and exit.
        """
        while True:
            element = product_queue.get()
            if element is None: break  # sentinel: end of the product stream
            name, date, productid = element
            zip_path = os.path.join(self.save_dir, date.strftime("%Y%m"), f"{name}.zip")
            if name not in self.exist_products:  # not in the download record
                if os.path.exists(zip_path): self.logger.info(f"success: {name}")
                else:  # file absent: enqueue for download, newest first
                    priority = -int(date.strftime("%Y%m%d"))
                    self.download_queue.put((priority, (name, zip_path, productid, self.attmpt)))
            else:  # already recorded as downloaded: hand straight to transfer
                transfer_queue.put(zip_path)
        self.stop_event.set()
        return

    async def woker(self, transfer_queue):
        """Async worker: drain ``download_queue`` until the inspector is done.

        Retries failed downloads until the per-product attempt budget is
        exhausted; completed files are pushed onto ``transfer_queue``.
        """
        while not (self.stop_event.is_set() and self.download_queue.empty()):
            # BUG FIX: the original blocking Queue.get() froze the whole
            # event loop, and deadlocked forever on an empty queue (also a
            # check-then-get race between workers); poll non-blockingly.
            try:
                priority, element = self.download_queue.get_nowait()
            except queue.Empty:
                await asyncio.sleep(1)
                continue
            name, file_path, productid, attmpt = element
            if os.path.exists(file_path):  # already completed elsewhere
                self.logger.info(f"success: {name}")
                transfer_queue.put(file_path)
                continue
            if attmpt == 0:  # retry budget exhausted
                self.logger.info(f"failure: {name}")
                continue
            # start download
            attmpt = attmpt - 1
            temp_path = file_path + ".incomplete"
            filelock = fasteners.InterProcessLock(temp_path)
            if filelock.acquire(blocking=False):
                # BUG FIX: release the inter-process lock even if the
                # download raises, so the lock is never leaked.
                try:
                    status = await self.downloader(productid, temp_path)
                finally:
                    filelock.release()
                if status:
                    os.rename(temp_path, file_path)
                    self.logger.info(f"success: {name}")
                    transfer_queue.put(file_path)
                else:
                    self.download_queue.put((priority, (name, file_path, productid, attmpt)))
            else:
                # Another process holds the lock; requeue and retry later.
                self.download_queue.put((priority, (name, file_path, productid, attmpt)))
        return

    async def downloader(self, productid, temp_path):
        """Stream one product into ``temp_path``.

        :returns: True when the whole file was written, False otherwise.
        """
        status = False
        async with self.semaphore:  # session_kwargs intentionally unused (see class note)
            async with aiohttp.ClientSession() as session:
                try:
                    async with session.get(
                        url=f"https://zipper.dataspace.copernicus.eu/odata/v1/Products({productid})/$value",
                        headers={"Authorization": f"Bearer {self.__access_token}"},
                        proxy=self.proxy,
                        timeout=600) as response:
                        if response.status == 200:
                            # BUG FIX: int(None) raised when the server
                            # omitted Content-Length; default to 0.
                            file_size = int(response.headers.get('Content-Length', 0))
                            file_name = os.path.basename(temp_path)
                            async with aiofiles.open(temp_path, 'wb') as file_handle:
                                with tqdm(total=file_size, desc=file_name, **self.tqdm_kwargs) as pbar:
                                    async for chunk in response.content.iter_any():
                                        await file_handle.write(chunk)
                                        await file_handle.flush()
                                        pbar.update(len(chunk))
                            status = True
                        else:
                            data = await response.json()
                            # e.g. too many concurrent connections
                            print(response.status, data)
                except (aiohttp.ClientError, asyncio.TimeoutError, OSError):
                    # BUG FIX: network/timeout errors previously escaped and
                    # killed the worker task; count them as a failed attempt.
                    status = False
                except asyncio.CancelledError:
                    status = False
            return status

    async def controller(self, product_queue, transfer_queue):
        """Spawn the worker pool and wait for every worker to finish."""
        workers = []
        for index in range(self.concurrent_downloads):
            worker = asyncio.create_task(
                self.woker(transfer_queue),
                name=f'下载异步任务-{index}')
            workers.append(worker)
        await asyncio.gather(*workers)
        return


def core(account, save_dir, attmpt, agent, product_queue, transfer_queue, logger):
    """Wire up and run one download session.

    Builds a :class:`Download`, launches its inspector as a daemon thread,
    then drives the asyncio worker pool until all downloads complete.
    """
    session = Download(account, agent, save_dir, attmpt, logger)
    inspector_thread = threading.Thread(
        target=session.inspector,
        args=(product_queue, transfer_queue),
        name="下载排序线程",
        daemon=True)
    inspector_thread.start()
    asyncio.run(session.controller(product_queue, transfer_queue), debug=True)
    return


