from tqdm import tqdm
import threading
import fasteners
import requests
import aiofiles
import asyncio
import aiohttp
import logging
import queue
import fcntl
import time
import os
import daemon
import redis
import json


class MaxLinesHandler(logging.Handler):
    """Logging handler that appends records to a file and trims the file so
    it never holds more than ``max_lines`` lines (oldest lines dropped).

    Args:
        filename: path of the log file (created on first emit).
        max_lines: keep only the newest ``max_lines`` lines after each emit;
            ``None`` disables trimming entirely.
    """

    def __init__(self, filename, max_lines=None):
        super().__init__()
        self.max_lines = max_lines
        self.filename = filename

    def emit(self, record):
        """Append the formatted record, then trim the file if needed."""
        try:
            with open(self.filename, 'a') as file_handle:
                file_handle.write(self.format(record) + '\n')
            # Only pay for the full read-back when trimming is enabled;
            # the original re-read the whole file on every emit regardless.
            if self.max_lines is not None:
                with open(self.filename, 'r') as file_handle:
                    lines = file_handle.readlines()
                if len(lines) > self.max_lines:
                    with open(self.filename, 'w') as file_handle:
                        file_handle.writelines(lines[-self.max_lines:])
        except Exception:
            # Handlers must never let exceptions escape emit();
            # delegate to the standard error hook instead.
            self.handleError(record)


class SameLogFilter(logging.Filter):
    """Filter that suppresses consecutive duplicate log messages.

    A record is dropped only when its rendered message is identical to the
    immediately preceding one; any different message resets the memory.
    """

    def __init__(self):
        # The original skipped super().__init__(), leaving the base
        # Filter attributes (name/nlen) undefined.
        super().__init__()
        self.last_log = None  # last message seen, or None before first record

    def filter(self, record):
        """Return True to keep the record, False to drop a repeat."""
        current_log = record.getMessage()
        if current_log == self.last_log:
            return False
        self.last_log = current_log
        return True


class Download:
    """Downloads Copernicus Data Space products fed through a priority queue.

    A daemon thread keeps an OAuth access token fresh while async worker
    tasks pull (name, file_path, productid) jobs off the queue, stream each
    product to disk with a progress bar, and report finished files to redis.
    """

    # Never set inside this module — presumably signalled externally so
    # workers drain the queue and exit. TODO confirm.
    stop_event = asyncio.Event()
    concurrent_downloads = 4
    # Caps simultaneous transfers across all worker tasks.
    semaphore = asyncio.Semaphore(concurrent_downloads)
    headers = {'Content-Type': 'application/x-www-form-urlencoded'}
    # Prepared session settings, currently unused by downloader()
    # (see the commented-out **self.session_kwargs there); kept as-is.
    session_kwargs = {
        'timeout': aiohttp.ClientTimeout(total=600),
        'connector': aiohttp.TCPConnector(limit=4)}
    tqdm_kwargs = {
        "disable": False,
        "ncols": 150,
        "nrows": 100,
        "smoothing": True,
        "unit": "B",
        "unit_scale": True,
        "unit_divisor": 1024}

    def __init__(self, account, agent):
        """account: dict with 'username'/'password'; agent: proxy 'ip'/'port'."""
        self.proxy = f"http://{agent['ip']}:{agent['port']}"
        self.proxies = {
            'http': f"{agent['ip']}:{agent['port']}",
            'https': f"{agent['ip']}:{agent['port']}"}
        self.refresh_data = {
            'grant_type': 'password',
            'username': account['username'],
            'password': account['password'],
            'client_id': 'cdse-public'}
        threading.Thread(target=self.refresher, name="刷新token线程", daemon=True).start()
        # Block until the refresher thread has obtained the first token.
        # NOTE(review): the mangled name is built from the *runtime* class
        # name, so this wait would break in a subclass — confirm before
        # subclassing.
        private_attribute = f'_{self.__class__.__name__}__access_token'
        while not hasattr(self, private_attribute):
            time.sleep(1)

    def refresher(self):
        """Daemon loop keeping self.__access_token valid.

        Bootstraps with the password grant, then switches to refresh-token
        grants; on rejection or any error it falls back to the password
        grant after one second.
        """
        data = self.refresh_data
        while True:
            try:
                with requests.post(
                    "https://identity.dataspace.copernicus.eu/auth/realms/CDSE/protocol/openid-connect/token",
                    headers={'Content-Type': 'application/x-www-form-urlencoded'},
                    data=data,
                    proxies=self.proxies,
                    timeout=60) as response:
                    result = response.json()
                    if response.status_code == 200:
                        # Accepted: store the token and renew just before expiry.
                        self.__access_token = result['access_token']
                        interval = result['expires_in'] - 1
                        data = {
                            'grant_type': 'refresh_token',
                            'refresh_token': result['refresh_token'],
                            'client_id': 'cdse-public'}
                    else:
                        # Rejected: retry soon with the full password grant.
                        interval = 1
                        data = self.refresh_data
            except (requests.RequestException, ValueError, KeyError):
                # ValueError: non-JSON body; KeyError: unexpected payload.
                # The original caught only RequestException, so a malformed
                # 200 response would have killed this thread permanently.
                interval = 1
                data = self.refresh_data
            time.sleep(interval)

    async def woker(self, download_queue, redisr):
        """Worker task: process jobs until stop_event is set and queue drains.

        (Method name 'woker' — sic — kept for backward compatibility.)
        """
        while not (self.stop_event.is_set() and download_queue.empty()):
            # PriorityQueue.get() is a *blocking* call and froze the whole
            # event loop in the original; poll non-blockingly instead and
            # yield to the loop while the queue is empty.
            try:
                priority, element = download_queue.get_nowait()
            except queue.Empty:
                await asyncio.sleep(1)
                continue
            name, file_path, productid = element
            if os.path.exists(file_path):
                # Already on disk: just report it downstream.
                redisr.rpush('downloader_raster', file_path)
                continue
            temp_path = file_path + ".incomplete"
            # Cross-process lock on the partial file so multiple daemon
            # instances never download the same product concurrently.
            filelock = fasteners.InterProcessLock(temp_path)
            if filelock.acquire(blocking=False):
                try:
                    status = await self.downloader(productid, temp_path)
                finally:
                    # The original leaked the lock if downloader() raised.
                    filelock.release()
                if status:
                    os.rename(temp_path, file_path)
                    redisr.rpush('downloader_raster', file_path)
                else:
                    print(f"下载失败: {file_path}")

    async def downloader(self, productid, temp_path):
        """Stream one product into temp_path; return True on success."""
        async with self.semaphore:  # **self.session_kwargs
            async with aiohttp.ClientSession() as session:
                status = False  # default: the original left this unbound on errors
                try:
                    async with session.get(
                        url=f"https://zipper.dataspace.copernicus.eu/odata/v1/Products({productid})/$value",
                        headers={"Authorization": f"Bearer {self.__access_token}"},
                        proxy=self.proxy,
                        timeout=600) as response:
                        if response.status == 200:
                            # Content-Length may be absent; tqdm accepts
                            # total=None (the original crashed on int(None)).
                            content_length = response.headers.get('Content-Length')
                            file_size = int(content_length) if content_length else None
                            file_name = os.path.basename(temp_path)
                            async with aiofiles.open(temp_path, 'wb') as file_handle:
                                with tqdm(total=file_size, desc=file_name, **self.tqdm_kwargs) as pbar:
                                    async for chunk in response.content.iter_any():
                                        await file_handle.write(chunk)
                                        await file_handle.flush()
                                        pbar.update(len(chunk))
                            status = True
                        else:
                            data = await response.json()
                            print(response.status, data)
                except (aiohttp.ClientError, asyncio.TimeoutError, OSError):
                    # Network/disk failures previously escaped and crashed
                    # the worker task; treat them as a failed download.
                    status = False
                except asyncio.exceptions.CancelledError:
                    status = False
            # Small pause between downloads while still holding the semaphore.
            await asyncio.sleep(5)
            return status

    async def controller(self, download_queue, redisr):
        """Spawn concurrent_downloads worker tasks and wait for all of them."""
        workers = []
        for index in range(self.concurrent_downloads):
            worker = asyncio.create_task(
                self.woker(download_queue, redisr),
                name=f'下载异步任务-{index}')
            workers.append(worker)
        await asyncio.gather(*workers)


def inspector(save_dir, download_queue, redisr):
    """Forever move search results from redis onto the download queue.

    Blocks on the 'searcher_downloader' list for JSON triples of
    (name, date, productid). Products already on disk are reported straight
    to 'downloader_raster'; everything else is queued newest-date-first.
    (The original also created an unused `exist_products` set and ended with
    an unreachable `return`; both removed.)
    """
    while True:
        _, element = redisr.brpop('searcher_downloader', 0)  # 0 = block forever
        data = json.loads(element.decode('utf-8'))
        name, date, productid = data
        # Files are sharded by YYYYMM (first six chars of the date).
        zip_path = os.path.join(save_dir, date[:6], f"{name}.zip")
        if os.path.exists(zip_path):
            # NOTE(review): lpush here but the download workers use rpush on
            # the same list — confirm which end the consumer reads from.
            redisr.lpush('downloader_raster', zip_path)
        else:
            # Negate the numeric date so newer acquisitions drain first
            # from the min-heap PriorityQueue.
            download_queue.put((-int(date), (name, zip_path, productid)))


def main():
    """Entry point: load the JSON config, start the queue-feeding thread,
    then run the async download controller until it finishes."""
    with open('/data/jiabing/RS_CODE/Sentinel2download/config.json', "r") as cfg_file:
        config = json.load(cfg_file)
    agent = config["agent"]
    # Read but unused here — kept so a missing key still fails fast;
    # presumably consumed elsewhere. TODO confirm.
    cargs = config["cargs"]
    account = config["accounts"][0]
    save_dir = config["save_dir"]
    rediscon = config["rediscon"]
    redis_client = redis.Redis(connection_pool=redis.ConnectionPool(**rediscon))
    pending = queue.PriorityQueue(maxsize=10000)
    # Feeder thread moves search results from redis onto the queue.
    feeder = threading.Thread(
        target=inspector,
        args=(save_dir, pending, redis_client),
        name="下载排序线程",
        daemon=True)
    feeder.start()
    downloader = Download(account, agent)
    # debug=True enables asyncio's slow-callback / unawaited-coroutine checks.
    asyncio.run(downloader.controller(pending, redis_client), debug=True)


if __name__ == "__main__":
    # Detach into a background daemon process; stdout/stderr are redirected
    # to fixed log files so tqdm output and errors survive detachment.
    # NOTE(review): both files are opened in "w" mode, truncating the
    # previous run's logs on every start — confirm that is intended.
    with daemon.DaemonContext(
        stdout=open(r"/data/logfile/DownloadDaemon_out.py.log","w"),
        stderr=open(r"/data/logfile/DownloadDaemon_err.py.log","w")):
        main()
