from psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT
from psycopg2.extras import execute_values
from datetime import datetime
from tqdm import tqdm
import threading
import fasteners
import functools
import requests
import aiofiles
import psycopg2
import asyncio
import aiohttp
import queue
import json
import time
import os


def run_in_thread(name, daemon=True):
    """Decorator factory that runs the wrapped callable in a new thread.

    Calling the decorated function starts a ``threading.Thread`` that
    executes the original function with the supplied arguments and
    immediately returns the started thread object (no waiting).

    Args:
        name: name assigned to the spawned thread.
        daemon: whether the spawned thread is a daemon (default True).
    """
    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            worker = threading.Thread(
                target=func,
                name=name,
                daemon=daemon,
                args=args,
                kwargs=kwargs,
            )
            worker.start()
            return worker
        return wrapper
    return decorator


class Download:
    """Concurrent downloader for Copernicus Data Space (CDSE) products.

    A daemon thread keeps an OAuth access token fresh while up to
    ``concurrent_downloads`` coroutines stream product archives to disk
    through an HTTP proxy.  Outcomes are published on the class-level
    ``out_queue`` as ``(status, (name, file_path, timestamp))`` tuples:
    status is True on success (or if the file already exists), False on
    a failed download, and None when another process holds the lock for
    that file.
    """

    concurrent_downloads = 4
    # NOTE(review): building the semaphore/connector at class-definition
    # time (no running event loop) is only safe on Python 3.10+ and a
    # recent aiohttp — confirm the deployment interpreter.
    semaphore = asyncio.Semaphore(concurrent_downloads)
    headers = {'Content-Type': 'application/x-www-form-urlencoded'}
    # Currently unused: downloader() builds its own plain ClientSession.
    session_kwargs = {
        'timeout': aiohttp.ClientTimeout(total=600),
        'connector': aiohttp.TCPConnector(limit=4)}
    tqdm_kwargs = {
        "disable": False,
        "ncols": 150,
        "nrows": 100,
        "smoothing": True,
        "unit": "B",
        "unit_scale": True,
        "unit_divisor": 1024}
    # Shared by all instances: every worker reports its result here.
    out_queue = queue.Queue()

    def __init__(self, account, agent, save_dir):
        """Start the token-refresh thread and block until a token exists.

        Args:
            account: mapping with CDSE 'username' and 'password'.
            agent: mapping with proxy 'ip' and 'port'.
            save_dir: base directory for downloaded archives.
        """
        self.save_dir = save_dir
        self.proxy = f"http://{agent['ip']}:{agent['port']}"
        self.proxies = {
            'http': f"{agent['ip']}:{agent['port']}",
            'https': f"{agent['ip']}:{agent['port']}"}
        self.refresh_data = {
            'grant_type': 'password',
            'username': account['username'],
            'password': account['password'],
            'client_id': 'cdse-public'}
        threading.Thread(target=self.refresher, name="刷新token线程", daemon=True).start()
        # refresher() sets the attribute from inside class Download, so its
        # mangled name is always '_Download__access_token'.  The previous
        # code derived the name from self.__class__.__name__, which would
        # never match for a subclass and spin forever.
        while not hasattr(self, '_Download__access_token'):
            time.sleep(1)

    def refresher(self):
        """Keep the access token valid forever (runs in a daemon thread).

        Uses the password grant first, then switches to the refresh-token
        grant.  On rejection or any network error it falls back to the
        password grant and retries after one second.
        """
        data = self.refresh_data
        while True:
            try:
                with requests.post(
                    "https://identity.dataspace.copernicus.eu/auth/realms/CDSE/protocol/openid-connect/token",
                    headers={'Content-Type': 'application/x-www-form-urlencoded'},
                    data=data,
                    proxies=self.proxies,
                    timeout=60) as response:
                    result = response.json()
                    if response.status_code == 200:
                        # Token granted: store it and schedule the next
                        # refresh just before it expires.
                        self.__access_token = result['access_token']
                        interval = result['expires_in'] - 1
                        data = {
                            'grant_type': 'refresh_token',
                            'refresh_token': result['refresh_token'],
                            'client_id': 'cdse-public'}
                    else:
                        # Rejected (e.g. refresh token expired): retry soon
                        # with the full password grant.
                        interval = 1
                        data = self.refresh_data
            except requests.RequestException:
                # Network problem: retry soon with the password grant.
                interval = 1
                data = self.refresh_data
            time.sleep(interval)
        return

    async def filelocker(self, task_queue):
        """Worker coroutine: drain ``task_queue``, downloading each product.

        For each ``(name, file_path, productid)`` task, an already-present
        file is reported as a success; otherwise an inter-process lock on
        ``file_path + '.incomplete'`` ensures that concurrent processes
        never write the same file.
        """
        while not task_queue.empty():
            name, file_path, productid = task_queue.get()
            if os.path.exists(file_path):
                # Finished previously (possibly by another process).
                self.out_queue.put((True, (name, file_path, datetime.now())))
                continue
            temp_path = file_path + ".incomplete"
            filelock = fasteners.InterProcessLock(temp_path)
            if filelock.acquire(blocking=False):
                try:
                    status = await self.downloader(productid, temp_path)
                finally:
                    # Release even if the download coroutine raises, so a
                    # crashed worker cannot leave the lock held.
                    filelock.release()
                if status:
                    # Publish the completed file under its final name.
                    os.rename(temp_path, file_path)
                    self.out_queue.put((True, (name, file_path, datetime.now())))
                else:
                    self.out_queue.put((False, (name, file_path, datetime.now())))
            else:
                # Another process is downloading this product right now.
                self.out_queue.put((None, (name, file_path, datetime.now())))
        return

    async def downloader(self, productid, temp_path):
        """Stream one product archive from CDSE to ``temp_path``.

        Returns:
            True on a complete download, False on any failure (non-200
            response, network error, or cancellation).
        """
        kwargs = {
            'url': f"https://zipper.dataspace.copernicus.eu/odata/v1/Products({productid})/$value",
            'headers': {"Authorization": f"Bearer {self.__access_token}"},
            'proxy': self.proxy,
            'timeout': 600}
        async with self.semaphore:
            async with aiohttp.ClientSession() as session:
                try:
                    async with session.get(**kwargs) as response:
                        if response.status == 200:
                            # Content-Length may be absent; int(None) used to
                            # crash here.  tqdm accepts total=None as
                            # "unknown size".
                            file_size = int(response.headers.get('Content-Length') or 0) or None
                            file_name = os.path.basename(temp_path)
                            async with aiofiles.open(temp_path, 'wb') as file_handle:
                                with tqdm(total=file_size, desc=file_name, **self.tqdm_kwargs) as pbar:
                                    async for chunk in response.content.iter_any():
                                        # No per-chunk flush: closing the
                                        # file flushes once at the end.
                                        await file_handle.write(chunk)
                                        pbar.update(len(chunk))
                            status = True
                        else:
                            # e.g. quota exceeded / too many connections.
                            data = await response.json()
                            print(response.status, data)
                            status = False
                except asyncio.exceptions.CancelledError:
                    # Deliberately swallowed: a cancelled worker reports the
                    # file as failed instead of propagating cancellation.
                    status = False
                except Exception:
                    # Narrowed from a bare ``except:`` — still best-effort,
                    # but no longer traps SystemExit/KeyboardInterrupt.
                    status = False
            return status

    async def controller(self, task_queue):
        """Spawn ``concurrent_downloads`` workers and wait for all of them."""
        workers = []
        for index in range(self.concurrent_downloads):
            worker = asyncio.create_task(
                self.filelocker(task_queue),
                name=f'下载任务-{index}')
            workers.append(worker)
        await asyncio.gather(*workers)
        return

    def execute(self, task_queue):
        """Run the download controller to completion (blocking)."""
        asyncio.run(self.controller(task_queue), debug=True)
        return

    def daemon(self, task_queue):
        """Run ``execute`` in a non-daemon background thread.

        Returns:
            (thread, out_queue): join the thread, then drain the queue.
        """
        thread = run_in_thread(
            name='下载线程',
            daemon=False)(self.execute)(task_queue)
        return thread, self.out_queue


def args_2_info(cargs, qhdm, sdate, edate, cloud, savedir):
    """Query pending Sentinel-2 L1C products and build a download queue.

    Selects products in the date window, under the cloud threshold, that
    are online, not yet downloaded (zip_path is null), and whose tile
    intersects the administrative region identified by ``qhdm``.

    Args:
        cargs: psycopg2 connection keyword arguments.
        qhdm: administrative region code used to select tiles.
        sdate, edate: inclusive product-date window (ISO date strings).
        cloud: maximum cloud percentage.
        savedir: base directory; one 'YYYYMM' subdirectory per product month.

    Returns:
        queue.Queue of (name, zip_path, product_id) tuples.

    Raises:
        psycopg2.Error: propagated if the query fails (previously this
        was swallowed and crashed later with a NameError on ``data``).
    """
    # Parameterized query — the previous version interpolated the
    # arguments with an f-string, which is SQL-injection-prone.
    query = """SELECT name, TO_CHAR(product_date, 'YYYYMM'), id
            FROM sentinel2_l1c
            WHERE product_date BETWEEN %s AND %s
            AND cloud <= %s
            AND online is true
            AND zip_path is null
            AND tile in (
                SELECT distinct ct.tile
                FROM china_tile AS ct
                JOIN dt_sy AS ds
                ON st_intersects(ct.geom, ds.shape)
                WHERE ds.qhdm = %s
            );
            """
    connection = psycopg2.connect(**cargs)
    try:
        connection.autocommit = True
        with connection.cursor() as cursor:
            cursor.execute(query, (sdate, edate, cloud, qhdm))
            data = cursor.fetchall()
    finally:
        connection.close()
    task_queue = queue.Queue()
    for name, yearmonth, pid in data:
        datedir = os.path.join(savedir, yearmonth)
        # exist_ok avoids the check-then-create race of the old code.
        os.makedirs(datedir, exist_ok=True)
        zip_path = os.path.join(datedir, f"{name}.zip")
        task_queue.put((name, zip_path, pid))
    return task_queue


def data_2_table(cargs, data):
    """Upsert downloaded-product records into ``sentinel2_l1c``.

    Args:
        cargs: psycopg2 connection keyword arguments.
        data: iterable of (name, zip_path, create_date) tuples; existing
            rows (by name) get their zip_path and create_date updated.
    """
    if not data:
        # Nothing to record; skip the connection round-trip entirely.
        return
    query = """
        INSERT INTO sentinel2_l1c
        (name,zip_path,create_date) 
        VALUES %s
        ON CONFLICT (name)
        DO UPDATE SET 
        zip_path=excluded.zip_path,
        create_date=excluded.create_date;
        """
    connection = psycopg2.connect(**cargs)
    try:
        # autocommit alone suffices; the old extra call to
        # set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT) was redundant.
        connection.autocommit = True
        with connection.cursor() as cursor:
            execute_values(cursor, query, data)
    finally:
        # Close even if the insert fails (the old code leaked the
        # connection on error).
        connection.close()
    return


def main(qhdm, sdate, edate, save_dir, cloud=100):
    """Drive one download session end to end.

    Reads credentials/proxy/database settings from ./config.json, builds
    the task queue from the database, runs the downloader to completion,
    then records every successful download back into the database.
    """
    with open('./config.json', "r") as fh:
        config = json.load(fh)
    db_args = config["cargs"]
    agent = config["agent"]
    account = config["accounts"][0]
    tasks = args_2_info(db_args, qhdm, sdate, edate, cloud, save_dir)
    downloader = Download(account, agent, save_dir)
    worker, results = downloader.daemon(tasks)
    worker.join()
    succeeded = []
    while not results.empty():
        ok, record = results.get()
        if ok:
            succeeded.append(record)
    data_2_table(db_args, succeeded)
    return


if __name__ == "__main__":
    # Example invocation: Henan province (qhdm 41), August 2024.
    main(
        qhdm='41',
        sdate='2024-08-01',
        edate='2024-09-01',
        save_dir='/home/fengyang/data/202401/sentinel_test',
        cloud=100,
    )

