from celery import Celery

from tqdm import tqdm
import threading
import fasteners
import requests
import aiofiles
import asyncio
import aiohttp
import logging
import queue
import fcntl
import time
import os
import daemon
import redis
import json


# NOTE(review): broker password and host are hard-coded here (and duplicated in
# main()); move them to configuration/environment for anything beyond testing.
password = 'foobared'
# Celery app backed by redis on 192.168.2.172: db 0 is the broker, db 1 the
# result backend.
celeryapp = Celery(
    "async_task", # app.import_name, # async_task
    broker=f'redis://:{password}@192.168.2.172:6379/0',
    backend=f'redis://:{password}@192.168.2.172:6379/1')


class MaxLinesHandler(logging.Handler):
    """Logging handler that appends records to a file capped at ``max_lines``.

    After each record is written, the file is truncated to its last
    ``max_lines`` lines. With ``max_lines=None`` the file grows unbounded.
    """

    def __init__(self, filename, max_lines=None):
        """
        :param filename: path of the log file to append to.
        :param max_lines: keep at most this many lines, or None for no cap.
        """
        super().__init__()
        self.max_lines = max_lines
        self.filename = filename

    def emit(self, record):
        """Append the formatted record, then enforce the line cap if set."""
        with open(self.filename, 'a') as file_handle:
            file_handle.write(self.format(record) + '\n')
        # Fix: the original read the whole file back on EVERY emit, even when
        # max_lines is None and the result was discarded. Only re-read (and
        # possibly truncate) when a cap is actually configured.
        if self.max_lines is None:
            return
        with open(self.filename, 'r') as file_handle:
            lines = file_handle.readlines()
        if len(lines) > self.max_lines:
            with open(self.filename, 'w') as file_handle:
                file_handle.writelines(lines[-self.max_lines:])


class SameLogFilter(logging.Filter):
    """Filter that drops a record whose message equals the previous message.

    Only *consecutive* duplicates are suppressed; a message may repeat later
    once a different message has been logged in between.
    """

    def __init__(self):
        # Fix: the original skipped super().__init__(); logging.Filter's
        # initializer sets .name/.nlen, which base-class machinery expects.
        super().__init__()
        self.last_log = None

    def filter(self, record):
        """Return True (keep) unless the message repeats the last one seen."""
        current_log = record.getMessage()
        if current_log != self.last_log:
            self.last_log = current_log
            return True
        return False


class Download:
    """Asynchronous product downloader for the Copernicus Data Space.

    A daemon thread (:meth:`refresher`) keeps an OAuth2 access token fresh
    while :meth:`controller` fans out ``concurrent_downloads`` asyncio
    workers that drain a priority queue of products, streaming each one to
    disk through an HTTP proxy.
    """

    # Set by the feeder thread once no more work will ever be queued;
    # workers exit when it is set AND the queue is empty.
    stop_event = asyncio.Event()
    # Number of worker tasks / simultaneous HTTP transfers.
    concurrent_downloads = 4
    semaphore = asyncio.Semaphore(concurrent_downloads)
    headers = {'Content-Type': 'application/x-www-form-urlencoded'}
    # NOTE(review): currently unused -- downloader() builds its ClientSession
    # without these kwargs; confirm whether they should be applied.
    session_kwargs = {
        'timeout': aiohttp.ClientTimeout(total=600),
        'connector': aiohttp.TCPConnector(limit=4)}
    # Progress-bar presentation options shared by all transfers.
    tqdm_kwargs = {
        "disable":False,
        "ncols":150,
        "nrows":100,
        "smoothing":True,
        "unit":"B",
        "unit_scale":True,
        "unit_divisor":1024}

    def __init__(self,account,agent,task_id):
        """Store proxy/credential settings and block until a token exists.

        :param account: dict with 'username' and 'password' keys (CDSE account).
        :param agent: dict with 'ip' and 'port' keys describing the HTTP proxy.
        :param task_id: string used to name the "<task_id>.txt" success record.
        """
        self.proxy = f"http://{agent['ip']}:{agent['port']}"
        self.proxies = {
            'http': f"{agent['ip']}:{agent['port']}",
            'https': f"{agent['ip']}:{agent['port']}"}
        self.refresh_data = {
            'grant_type': 'password',
            'username': account['username'],
            'password': account['password'],
            'client_id': 'cdse-public'}
        self.task_id = task_id
        threading.Thread(target=self.refresher,name="刷新token线程",daemon=True).start()
        # Block until the refresher thread has produced the first access
        # token. The attribute name must reproduce Python's class-private
        # name mangling (_Download__access_token).
        private_attribute = f'_{self.__class__.__name__}__access_token'
        while not hasattr(self, private_attribute):
            time.sleep(1)

    def refresher(self):
        """Daemon loop that keeps ``self.__access_token`` valid forever.

        Starts with a password grant, then switches to refresh-token grants;
        any rejection or network error falls back to the password grant after
        a one-second pause.
        """
        data = self.refresh_data
        while True:  # runs for the lifetime of the process
            try:  # request an access token and refresh token
                with requests.post(
                    "https://identity.dataspace.copernicus.eu/auth/realms/CDSE/protocol/openid-connect/token",
                    headers={'Content-Type': 'application/x-www-form-urlencoded'},
                    data=data,
                    proxies=self.proxies,
                    timeout=60) as response:
                    result = response.json()
                    if response.status_code == 200:  # accepted: store token, sleep until just before expiry
                        self.__access_token = result['access_token']
                        interval = result['expires_in'] - 1
                        data = {
                                'grant_type': 'refresh_token',
                                'refresh_token': result['refresh_token'],
                                'client_id': 'cdse-public'}
                    else:  # rejected: retry shortly with the password grant
                        interval = 1
                        data = self.refresh_data
            # Fix: also catch ValueError -- a non-JSON error body from
            # response.json() would otherwise kill the refresher thread.
            except (requests.RequestException, ValueError):
                interval = 1
                data = self.refresh_data
            time.sleep(interval)
        return

    async def woker(self,download_queue,redisr):
        """Worker coroutine: drain the queue until stop_event is set and it is empty.

        Queue elements are ``(priority, (name, file_path, productid, attmpt))``;
        failed downloads are re-queued with one fewer attempt remaining.
        """
        while not (self.stop_event.is_set() and download_queue.empty()):
            # NOTE(review): queue.PriorityQueue.get() is a blocking call and
            # will stall the event loop when the queue is empty -- confirm the
            # feeder keeps it populated until the sentinel arrives.
            priority,element = download_queue.get()
            name, file_path, productid, attmpt = element
            if os.path.exists(file_path):
                # Already on disk: record success and hand off for transfer.
                with open(f"{self.task_id}.txt","a+") as file_handle:
                    file_handle.write(f"success: {name}\n")
                redisr.rpush('downloader_transfer',file_path)
                continue
            if attmpt == 0:
                # Retry budget exhausted. Fix: the original wrote
                # print()(f"...") which raised TypeError by calling the None
                # returned from print().
                print(f"failure: {name}")
                continue
            # Start the download attempt.
            attmpt = attmpt-1
            temp_path = file_path + ".incomplete"
            filelock = fasteners.InterProcessLock(temp_path)
            if filelock.acquire(blocking=False):
                status = await self.downloader(productid,temp_path)
                filelock.release()
                if status:
                    os.rename(temp_path, file_path)
                    with open(f"{self.task_id}.txt","a+") as file_handle:
                        file_handle.write(f"success: {name}\n")
                    redisr.rpush('downloader_transfer',file_path)
                else:
                    # Failed transfer: re-queue with the decremented budget.
                    download_queue.put((priority,(name,file_path,productid,attmpt)))
            else:
                # Another process holds the lock: re-queue (budget already
                # decremented, matching the original behavior).
                download_queue.put((priority,(name,file_path,productid,attmpt)))
        return

    async def downloader(self,productid,temp_path):
        """Stream one product to ``temp_path``; return True on success."""
        async with self.semaphore:  # cap simultaneous transfers
            status = False
            async with aiohttp.ClientSession() as session:
                try:
                    async with session.get(
                        url=f"https://zipper.dataspace.copernicus.eu/odata/v1/Products({productid})/$value",
                        headers={"Authorization": f"Bearer {self.__access_token}"},
                        proxy=self.proxy,
                        timeout=600) as response:
                        if response.status == 200:
                            # Fix: Content-Length may be absent; int(None)
                            # raised TypeError. Fall back to 0 for tqdm.
                            file_size = int(response.headers.get('Content-Length') or 0)
                            file_name = os.path.basename(temp_path)
                            async with aiofiles.open(temp_path, 'wb') as file_handle:
                                with tqdm(total=file_size,desc=file_name,**self.tqdm_kwargs) as pbar:
                                    async for chunk in response.content.iter_any():
                                        await file_handle.write(chunk)
                                        await file_handle.flush()
                                        pbar.update(len(chunk))
                            status = True
                        else:
                            data = await response.json()
                            print(response.status,data)
                # Fix: the original caught only CancelledError, so any
                # timeout or aiohttp network error propagated and killed the
                # whole worker pool via gather(); report failure so the
                # caller's re-queue/retry path runs instead.
                except (asyncio.CancelledError,
                        asyncio.TimeoutError,
                        aiohttp.ClientError):
                    status = False
            return status

    async def controller(self,download_queue,redisr):
        """Spawn the worker pool and wait until every worker has finished."""
        workers = []
        for index in range(self.concurrent_downloads):
            worker = asyncio.create_task(
                self.woker(download_queue,redisr),
                name=f'下载异步任务-{index}')
            workers.append(worker)
        await asyncio.gather(*workers)
        return


def setup_logger(name, max_lines, log_file):
    """Build and return a DEBUG-level logger writing to ``log_file``.

    The attached file handler caps the file at ``max_lines`` lines, drops
    consecutive duplicate messages, and emits records at INFO and above
    using an "asctime - levelname - message" format.
    """
    handler = MaxLinesHandler(filename=log_file, max_lines=max_lines)
    handler.setLevel(logging.INFO)
    handler.addFilter(SameLogFilter())
    handler.setFormatter(
        logging.Formatter('%(asctime)s - %(levelname)s - %(message)s'))
    logger = logging.getLogger(name)
    logger.setLevel(logging.DEBUG)
    logger.addHandler(handler)
    return logger


def inspector(save_dir,attmpt,download_queue,task_id,redisr):
    """Feed the download queue from the ``searcher_downloader`` redis list.

    Names already in the "<task_id>.txt" success record, or whose zip already
    exists on disk, are routed straight to the ``downloader_transfer`` list;
    everything else is queued for download with ``attmpt`` retries. A literal
    "None" element marks the end of the feed, after which the workers' stop
    event is set.

    :param save_dir: root directory; files land under <save_dir>/<YYYYMM>/.
    :param attmpt: retry budget given to each queued product.
    :param download_queue: queue.PriorityQueue consumed by Download workers.
    :param task_id: basename of the success-record text file.
    :param redisr: redis client used for both the feed and transfer lists.
    """
    exist_products = set()
    if os.path.exists(f"{task_id}.txt"):  # replay the success record, if any
        with open(f"{task_id}.txt","r") as file_handle:
            # Fix: the original iterated over the undefined name `file`,
            # raising NameError whenever the record file existed.
            for line in file_handle.readlines():
                if "success: " in line:  # collect names marked as successes
                    name = line.split("success: ")[1].replace('\n', '')
                    exist_products.add(name)
    while True:
        _,element = redisr.brpop('searcher_downloader',0)
        element = element.decode('utf-8')
        if element == "None": break  # end-of-feed sentinel
        data = json.loads(element)
        name,date,productid = data
        zip_path = os.path.join(save_dir,date[:6],f"{name}.zip")
        if name not in exist_products:  # not yet recorded as downloaded
            if os.path.exists(zip_path):
                # Already on disk: record success and hand off for transfer.
                with open(f"{task_id}.txt","a+") as file_handle:
                    file_handle.write(f"success: {name}\n")
                redisr.rpush('downloader_transfer', zip_path)
            else:  # missing: queue for download, newest acquisitions first
                priority = -int(date)
                download_queue.put((priority, (name,zip_path,productid,attmpt)))
        else:  # already recorded: hand off for transfer
            redisr.rpush('downloader_transfer', zip_path)
    # Fix: the original called bare `stop_event.set()` -- a NameError; the
    # event the workers watch lives on the Download class.
    Download.stop_event.set()
    return


@celeryapp.task
def main():
    """Celery task: wire the inspector feeder thread to the download workers.

    Reads ./config.json for the proxy, account, and save directory, starts
    the inspector as a daemon thread, runs the Download worker pool to
    completion, then pushes the "None" sentinel onto the transfer list.
    """
    task_id = "test_1234"
    with open('./config.json',"r") as config_handle:
        params = json.load(config_handle)
    account = params["accounts"][0]
    agent = params["agent"]
    cargs = params["cargs"]
    save_dir = params["save_dir"]
    attmpt = 15
    download_queue = queue.PriorityQueue(maxsize=10000)
    redisr = redis.Redis(connection_pool=redis.ConnectionPool(
        host='192.168.2.172',
        port=6379,
        password='foobared',
        db=0))
    feeder = threading.Thread(
        target=inspector,
        args=(save_dir,attmpt,download_queue,task_id,redisr),
        name="下载排序线程",
        daemon=True)
    feeder.start()
    downloader = Download(account,agent,task_id)
    asyncio.run(downloader.controller(download_queue,redisr),debug=True)
    # End-of-stream sentinel for whatever consumes downloader_transfer.
    redisr.rpush('downloader_transfer', "None")


if __name__ == "__main__":
    # main()
    log_path = r'/data/fengyy/logfile/DownloadCelery.log'
    args = ['worker',
            '--concurrency=4',
            '--loglevel=INFO',
            '--detach', 
            f'--logfile={log_path}']
    celeryapp.worker_main(argv=args)

