from requests.packages.urllib3.util.retry import Retry
from requests.adapters import HTTPAdapter
from psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT
from psycopg2.extras import execute_values
from datetime import datetime
from moviepy.editor import VideoFileClip
from tqdm import tqdm
import requests
import psycopg2
import threading
import fasteners
import aiofiles
import asyncio
import aiohttp
import logging
import queue
import fcntl
import time
import re
import os

import bilibilivideo
from Async import app_celery



class Download:
    """Concurrent downloader for Copernicus Data Space (CDSE) products.

    A daemon thread (``refresher``) keeps an OAuth access token valid, an
    inspector thread sorts products into a priority queue, and a small pool
    of asyncio workers streams the product archives to disk behind an
    inter-process file lock.
    """

    # Thread-safe priority queue shared between the inspector thread and the
    # asyncio workers; a plain queue.PriorityQueue (not asyncio's) because
    # the producer runs in a separate OS thread.
    download_queue = queue.PriorityQueue(maxsize=10000)
    # Set by the inspector thread once every product has been examined.
    stop_event = asyncio.Event()
    # Maximum number of simultaneous product transfers.
    concurrent_downloads = 2
    semaphore = asyncio.Semaphore(concurrent_downloads)
    headers = {'Content-Type': 'application/x-www-form-urlencoded'}
    session_kwargs = {
        'timeout': aiohttp.ClientTimeout(total=600),
        'connector': aiohttp.TCPConnector(limit=4)}
    # Shared configuration for the per-file progress bars.
    tqdm_kwargs = {
        "disable": False,
        "ncols": 150,
        "nrows": 100,
        "smoothing": True,
        "unit": "B",
        "unit_scale": True,
        "unit_divisor": 1024}

    def __init__(self, account, agent, save_dir, attmpt, logger=None):
        """Start the token-refresh thread and block until a token exists.

        account:  dict with 'username'/'password' CDSE credentials.
        agent:    dict with 'ip'/'port' of the HTTP proxy to tunnel through.
        save_dir: root directory where product zips are written.
        attmpt:   per-product download retry budget.
        logger:   optional logging.Logger; defaults to this module's logger.
                  (Bug fix: table_2_zipfile passed a logger positionally but
                  __init__ previously accepted no such parameter, raising
                  TypeError; inspector/woker also read self.logger.)
        """
        self.save_dir = save_dir
        self.proxy = f"http://{agent['ip']}:{agent['port']}"
        self.proxies = {
            'http': f"{agent['ip']}:{agent['port']}",
            'https': f"{agent['ip']}:{agent['port']}"}
        self.attmpt = attmpt
        self.logger = logger if logger is not None else logging.getLogger(__name__)
        # Names of products already recorded as downloaded. Bug fix: the
        # inspector read this attribute without it ever being assigned
        # (AttributeError). Callers may overwrite it before inspecting.
        self.exist_products = set()
        self.refresh_data = {
            'grant_type': 'password',
            'username': account['username'],
            'password': account['password'],
            'client_id': 'cdse-public'}
        threading.Thread(target=self.refresher, name="刷新token线程", daemon=True).start()
        # Block until the refresher has produced the (name-mangled) token.
        private_attribute = f'_{self.__class__.__name__}__access_token'
        while not hasattr(self, private_attribute):
            time.sleep(1)

    def refresher(self):
        """Daemon loop that keeps ``self.__access_token`` fresh.

        Uses the password grant to obtain the first token, then the
        refresh-token grant just before each expiry; falls back to the
        password grant on any rejection or network error.
        """
        data = self.refresh_data
        while True:  # runs for the lifetime of the process (daemon thread)
            try:
                with requests.post(
                    "https://identity.dataspace.copernicus.eu/auth/realms/CDSE/protocol/openid-connect/token",
                    headers={'Content-Type': 'application/x-www-form-urlencoded'},
                    data=data,
                    proxies=self.proxies,
                    timeout=60) as response:
                    result = response.json()
                    if response.status_code == 200:
                        # Accepted: store the token, refresh 1 s before expiry.
                        self.__access_token = result['access_token']
                        interval = result['expires_in'] - 1
                        data = {
                            'grant_type': 'refresh_token',
                            'refresh_token': result['refresh_token'],
                            'client_id': 'cdse-public'}
                    else:
                        # Rejected: retry shortly with the password grant.
                        interval = 1
                        data = self.refresh_data
            except requests.RequestException:
                # Network error: retry shortly with the password grant.
                interval = 1
                data = self.refresh_data
            time.sleep(interval)
        return

    def inspector(self, product_queue, transfer_queue):
        """Consume (name, date, productid) items until a ``None`` sentinel.

        Products already on record go straight to ``transfer_queue``;
        missing files are queued for download, newest acquisition first.
        Sets ``stop_event`` when the producer signals completion.
        """
        while True:
            element = product_queue.get()
            if element is None:
                break  # sentinel: producer is done
            name, date, productid = element
            zip_path = os.path.join(self.save_dir, date.strftime("%Y%m"), f"{name}.zip")
            if name not in self.exist_products:  # not in the download record
                if os.path.exists(zip_path):
                    self.logger.info(f"success: {name}")
                else:
                    # Negative date so more recent products sort first.
                    priority = -int(date.strftime("%Y%m%d"))
                    self.download_queue.put((priority, (name, zip_path, productid, self.attmpt)))
            else:  # already recorded: hand straight to the transfer stage
                transfer_queue.put(zip_path)
        self.stop_event.set()
        return

    async def woker(self, transfer_queue):
        """Asyncio worker: drain ``download_queue`` until the inspector ends.

        Each item is retried until its attempt budget is exhausted; failed
        or lock-contended items are re-queued at the same priority.
        """
        while not (self.stop_event.is_set() and self.download_queue.empty()):
            # Bug fix: the previous blocking Queue.get() stalled the whole
            # event loop (starving sibling workers) and could deadlock when
            # the queue drained just before stop_event was set.
            try:
                priority, element = self.download_queue.get_nowait()
            except queue.Empty:
                await asyncio.sleep(1)
                continue
            name, file_path, productid, attmpt = element
            if os.path.exists(file_path):
                self.logger.info(f"success: {name}")
                transfer_queue.put(file_path)
                continue
            if attmpt == 0:
                self.logger.info(f"failure: {name}")
                continue
            # Start the download attempt.
            attmpt = attmpt - 1
            temp_path = file_path + ".incomplete"
            filelock = fasteners.InterProcessLock(temp_path)
            if filelock.acquire(blocking=False):
                try:
                    status = await self.downloader(productid, temp_path)
                finally:
                    # Bug fix: release the lock even if downloader raises.
                    filelock.release()
                if status:
                    os.rename(temp_path, file_path)
                    self.logger.info(f"success: {name}")
                    transfer_queue.put(file_path)
                else:
                    self.download_queue.put((priority, (name, file_path, productid, attmpt)))
            else:
                # Another process holds the lock: re-queue and retry later.
                self.download_queue.put((priority, (name, file_path, productid, attmpt)))
        return

    async def downloader(self, productid, temp_path):
        """Stream one product archive to ``temp_path``; True on success.

        Bounded by the class-level semaphore so at most
        ``concurrent_downloads`` transfers run at once.
        """
        status = False
        async with self.semaphore:
            async with aiohttp.ClientSession() as session:
                try:
                    async with session.get(
                        url=f"https://zipper.dataspace.copernicus.eu/odata/v1/Products({productid})/$value",
                        headers={"Authorization": f"Bearer {self.__access_token}"},
                        proxy=self.proxy,
                        timeout=600) as response:
                        if response.status == 200:
                            # Bug fix: a missing Content-Length header used to
                            # crash int(None); default to 0 instead.
                            file_size = int(response.headers.get('Content-Length', 0))
                            file_name = os.path.basename(temp_path)
                            async with aiofiles.open(temp_path, 'wb') as file_handle:
                                with tqdm(total=file_size, desc=file_name, **self.tqdm_kwargs) as pbar:
                                    async for chunk in response.content.iter_any():
                                        await file_handle.write(chunk)
                                        await file_handle.flush()
                                        pbar.update(len(chunk))
                            status = True
                        else:
                            data = await response.json()
                            print(response.status, data)
                            status = False
                # Bug fix: only CancelledError was caught before, so any
                # network/client error killed the worker task outright.
                except (aiohttp.ClientError, asyncio.TimeoutError,
                        asyncio.exceptions.CancelledError, OSError):
                    status = False
        return status

    async def controller(self, product_queue, transfer_queue):
        """Spawn ``concurrent_downloads`` worker tasks and await them all."""
        workers = []
        for index in range(self.concurrent_downloads):
            worker = asyncio.create_task(
                self.woker(transfer_queue),
                name=f'下载异步任务-{index}')
            workers.append(worker)
        await asyncio.gather(*workers)
        return


def get_bilibili_video(video_url, save_dir, cache_dir):
    """Download one Bilibili video and return the path of the FLV file."""
    fetcher = bilibilivideo.Bilibili(video_url, save_dir, cache_dir)
    flv_path = fetcher.main()
    print(f"{flv_path}：success!")
    return flv_path

@app_celery.task
def get_bilibili_audio(audio_url, save_dir, cache_dir):
    """Celery task: download a Bilibili video, extract its audio track to
    an MP3 next to it, then delete the intermediate video file."""
    fetcher = bilibilivideo.Bilibili(audio_url, save_dir, cache_dir)
    video_path = fetcher.main()
    audio_path = video_path.replace(".flv", ".mp3")
    clip = VideoFileClip(video_path)
    clip.audio.write_audiofile(audio_path)
    clip.close()
    os.remove(video_path)
    return


def get_params(tile, start, end, cloud):
    """Build the OData catalogue query for one MGRS tile.

    Returns (tile, url, params) where params filters Sentinel-2 L1C
    products by tile id, cloud cover upper bound, and acquisition window
    [start, end) given as 'YYYY-MM-DD' strings.
    """
    catalogue_url = "https://catalogue.dataspace.copernicus.eu/odata/v1/Products"
    filter_clauses = (
        f"Attributes/OData.CSC.StringAttribute/any(att:att/Name eq 'tileId' and att/OData.CSC.StringAttribute/Value eq '{tile}')",
        f"Attributes/OData.CSC.StringAttribute/any(att:att/Name eq 'processingLevel' and att/OData.CSC.StringAttribute/Value eq 'S2MSI1C')",
        f"Attributes/OData.CSC.DoubleAttribute/any(att:att/Name eq 'cloudCover' and att/OData.CSC.DoubleAttribute/Value lt {cloud})",
        f"ContentDate/Start gt {start}T00:00:00.000Z",
        f"ContentDate/Start lt {end}T00:00:00.000Z",
    )
    query = {
        '$filter': " and ".join(filter_clauses),
        '$orderby': "ContentDate/Start desc",
        '$count': 'True',
        '$expand': 'Attributes',  # expand product attributes inline
    }
    return tile, catalogue_url, query


def tile_2_info(tile, start, end, cloud, proxies):
    """Query the Copernicus OData catalogue for one tile.

    Follows '@odata.nextLink' pagination and returns the accumulated list
    of product dicts. Request failures are printed and skipped
    (best-effort).
    """
    retry_policy = Retry(
        total=10,
        connect=5,
        read=5,
        backoff_factor=0.5,
        raise_on_status=True,
        status_forcelist=[104, 500, 502, 503, 504])
    http_adapter = HTTPAdapter(
        max_retries=retry_policy,
        pool_connections=1,
        pool_maxsize=1)
    session = requests.Session()
    session.proxies.update(proxies)
    session.mount('http://', http_adapter)
    session.mount('https://', http_adapter)
    session.keep_alive = True  # NOTE(review): not a real Session attribute; kept for parity
    pending = [get_params(tile, start, end, cloud)]
    products = []
    while pending:
        try:
            tile, url, params = pending.pop()
            with session.get(url, params=params) as response:
                if response.status_code == 200:
                    payload = response.json()
                    next_link = payload.get("@odata.nextLink")
                    if next_link:
                        # The next link already carries the query string.
                        pending.append((tile, next_link, None))
                    products.extend(payload["value"])
                else:
                    print(response.status_code, response.json())
        except requests.RequestException as e:
            print(e)
    session.close()
    return products


def info_2_record(tile, product):
    """Flatten one catalogue product dict into a database record tuple.

    Returns (name, id, tile, cloud, online, product_date, wkt, now) with
    the '.SAFE' suffix stripped from the name and the footprint WKT pulled
    out of the quoted 'geography' literal.
    """
    acquired = datetime.strptime(
        product['ContentDate']['Start'], "%Y-%m-%dT%H:%M:%S.%fZ")
    footprint_wkt = re.search(r"'(.*?)'", product['Footprint']).group(1)
    # [0] keeps the original IndexError if cloudCover is absent.
    cloud_cover = [a for a in product['Attributes'] if a['Name'] == "cloudCover"][0]["Value"]
    return (
        product['Name'].replace(".SAFE", ""),
        product['Id'],
        tile,
        cloud_cover,
        product['Online'],
        acquired,
        footprint_wkt,
        datetime.now(),
    )


def data_2_table(cargs, data):
    """Bulk-upsert product records into the sentinel2_l1c table.

    cargs: psycopg2.connect keyword arguments.
    data:  iterable of record tuples as produced by info_2_record.

    Bug fix: the connection and cursor were leaked if execute_values
    raised; they are now closed via with/finally. The redundant
    set_isolation_level(AUTOCOMMIT) call (equivalent to autocommit=True)
    was dropped, and the needless f-string prefix on the SQL removed.
    """
    connection = psycopg2.connect(**cargs)
    connection.autocommit = True  # each statement commits immediately
    query = """
        INSERT INTO sentinel2_l1c
        (name,id,tile,cloud,online,product_date,geom,create_date) 
        VALUES %s
        ON CONFLICT (name)
        DO UPDATE SET 
        online=excluded.online,
        id=excluded.id,
        tile=excluded.tile,
        cloud=excluded.cloud,
        product_date=excluded.product_date,
        geom=excluded.geom,
        create_date=excluded.create_date;
        """
    try:
        with connection.cursor() as cursor:
            execute_values(cursor, query, data)
    finally:
        connection.close()
    return


def qhdm_2_tiles(cargs, qhdm):
    """Return the distinct MGRS tiles intersecting an admin region.

    cargs: psycopg2.connect keyword arguments.
    qhdm:  administrative-division code matched against dt_sy.qhdm.
    Returns a list of 1-tuples (tile,), or [] if the query fails.

    Bug fixes: qhdm was interpolated into the SQL with an f-string
    (SQL injection / quoting bugs) — now a bound %s parameter; `data`
    was unbound when the query raised, causing UnboundLocalError at
    return — now defaults to []; the connection/cursor leaked on
    exception — now closed in finally/with.
    """
    query = """
            SELECT distinct china_tile.tile
            FROM china_tile
            JOIN dt_sy 
            on st_intersects(china_tile.geom,dt_sy.shape)
            WHERE dt_sy.qhdm = %s 
            """
    data = []
    connection = psycopg2.connect(**cargs)
    connection.autocommit = True
    try:
        with connection.cursor() as cursor:
            cursor.execute(query, (qhdm,))
            data = cursor.fetchall()
    except Exception:
        # Best-effort: keep the original swallow-and-continue behavior,
        # but now return an empty list instead of crashing.
        connection.rollback()
    finally:
        connection.close()
    return data


def table_2_zipfile(account, save_dir, attmpt, agent, product_queue, transfer_queue, logger):
    """Run one download session: inspector thread + asyncio worker pool.

    Bug fix: Download() was called with five positional arguments while
    its __init__ accepts four (account, agent, save_dir, attmpt), raising
    TypeError. The logger is now attached as an attribute, which
    inspector/woker read via self.logger.
    NOTE(review): inspector also reads self.exist_products — confirm it is
    assigned (here or in Download.__init__) before products arrive.
    """
    download = Download(account, agent, save_dir, attmpt)
    download.logger = logger
    threading.Thread(
        target=download.inspector,
        args=(product_queue, transfer_queue),
        name="下载排序线程",
        daemon=True).start()
    asyncio.run(download.controller(product_queue, transfer_queue), debug=True)
    return


def qhdm_2_table(cargs, qhdm, start, end, cloud, proxies):
    """Refresh catalogue metadata for every tile of one admin region.

    Looks up the region's tiles, queries the catalogue per tile, converts
    each product to a record, and upserts them all in one batch.
    """
    all_records = []
    for (tile,) in tqdm(qhdm_2_tiles(cargs, qhdm)):
        products = tile_2_info(tile, start, end, cloud, proxies)
        for product in tqdm(products):
            all_records.append(info_2_record(tile, product))
    data_2_table(cargs, all_records)
    return