#!/data/ygsfb/virtualEnvironment/sentinel2/bin/python
import asyncio
import atexit
import fcntl
import functools
import json
import logging
import os
import queue
import re
import sched
import socket
import threading
import time
from datetime import datetime, timedelta

import aiofiles
import aiohttp
import asyncpg
import daemonize
import paramiko
import psycopg2
import requests
from dateutil.relativedelta import relativedelta
from psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT
from psycopg2.extras import execute_values
from requests.adapters import HTTPAdapter
from requests.packages.urllib3.util.retry import Retry
from tqdm import tqdm


def write_to_file(file_path, zip_path, mode):
    """Generator coroutine that streams chunks into *file_path* under an
    exclusive file lock, then renames it to *zip_path* on completion.

    Prime with ``next()`` to open and lock the file. ``send(bytes)`` appends
    a chunk (flushed immediately); ``send(None)`` releases the lock, renames
    the temp file to its final name and finishes the generator.
    """
    with open(file_path, mode=mode) as handle:
        try:
            # Non-blocking exclusive lock: fail fast if another process writes.
            fcntl.flock(handle, fcntl.LOCK_EX | fcntl.LOCK_NB)
            while True:
                chunk = yield
                if chunk is not None:
                    handle.write(chunk)
                    handle.flush()  # push data to disk right away
                    continue
                # None is the completion signal: unlock and publish the file.
                fcntl.flock(handle, fcntl.LOCK_UN)
                os.rename(file_path, zip_path)
                return
        except BlockingIOError:
            raise Exception("文件已被其他进程占用，无法写入")
 
 
def split_periods(period):
    """Split a (start, end) date-string pair at "now minus 5 days".

    The history half can be queried once (data is stable); the current half
    must be re-polled.

    Args:
        period: tuple of "%Y-%m-%d" strings (start_date, end_date).

    Returns:
        (history_period, current_period): each a (start, end) tuple of
        "%Y-%m-%d" strings, or None when that half is empty.

    Raises:
        ValueError: if start_date is after end_date.
    """
    start_str, end_str = period
    start_date = datetime.strptime(start_str, "%Y-%m-%d")
    end_date = datetime.strptime(end_str, "%Y-%m-%d")
    if start_date > end_date:
        # Original code only logged here and then crashed with an
        # UnboundLocalError; fail explicitly instead.
        raise ValueError("输入日期有误！")
    split_date = datetime.now() - timedelta(days=5)
    split_str = split_date.strftime("%Y-%m-%d")
    if start_date <= split_date <= end_date:
        # Period straddles the split point: one half of each.
        history_period = (start_str, split_str)
        current_period = (split_str, end_str)
    elif end_date < split_date:
        # Entirely in the past: clamp history to the requested end
        # (was (start, split), which queried beyond the request).
        history_period = (start_str, end_str)
        current_period = None
    else:  # split_date < start_date: entirely recent/future
        history_period = None
        # Clamp to the requested start (was (split, start-of-window overlap)).
        current_period = (start_str, end_str)
    return history_period, current_period
    

def qhdm_2_tiles(cargs, qhdm):
    """Return the distinct tile codes whose geometry intersects the
    administrative division *qhdm*.

    Args:
        cargs: psycopg2 connection keyword arguments.
        qhdm: administrative-division code filtering ``dt_sy``.

    Returns:
        list[str]: tile identifiers from ``china_tile``.
    """
    # Parameterized query: the original interpolated qhdm with an f-string,
    # which is an SQL-injection vector.
    query = """
            SELECT distinct china_tile.tile
            FROM china_tile
            JOIN dt_sy 
            on st_intersects(china_tile.geom,dt_sy.shape)
            WHERE dt_sy.qhdm = %s 
            """
    connection = psycopg2.connect(**cargs)
    connection.autocommit = True
    try:
        with connection.cursor() as cursor:
            cursor.execute(query, (qhdm,))
            data = cursor.fetchall()
    finally:
        # Always close; the original leaked `data` unbound on failure and
        # crashed at the return line instead of surfacing the DB error.
        connection.close()
    return [row[0] for row in data]


def updata_table(cargs, data):
    """Bulk-upsert Sentinel-2 L1C product rows into ``sentinel2_l1c``.

    Args:
        cargs: psycopg2 connection keyword arguments.
        data: iterable of 8-tuples matching
            (name, id, tile, cloud, online, product_date, geom, create_date).
    """
    query = """
        INSERT INTO sentinel2_l1c
        (name,id,tile,cloud,online,product_date,geom,create_date) 
        VALUES %s
        ON CONFLICT (name)
        DO UPDATE SET 
        online=excluded.online,
        id=excluded.id,
        tile=excluded.tile,
        cloud=excluded.cloud,
        product_date=excluded.product_date,
        geom=excluded.geom,
        create_date=excluded.create_date;
        """
    connection = psycopg2.connect(**cargs)
    # set_isolation_level(AUTOCOMMIT) already enables autocommit; the
    # original also set connection.autocommit = True redundantly.
    connection.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)
    try:
        with connection.cursor() as cursor:
            execute_values(cursor, query, data)
    finally:
        # Close even if the upsert raises (the original leaked the connection).
        connection.close()
    return


def analysis_product(tile, products):
    """Convert OData product dicts into DB records and download tasks.

    Args:
        tile: tile identifier the products were searched for.
        products: list of Copernicus OData product dicts (with 'Name', 'Id',
            'ContentDate', 'Footprint', 'Attributes', 'Online').

    Returns:
        (records, downloads):
            records: 8-tuples for the sentinel2_l1c upsert.
            downloads: [name, product_date, product_id] for each Online product.
    """
    records = []
    downloads = []
    for product in products:
        name = product['Name'].replace(".SAFE", "")
        # Parse once and reuse (the original re-parsed the same timestamp
        # for the downloads list).
        product_date = datetime.strptime(
            product['ContentDate']['Start'], "%Y-%m-%dT%H:%M:%S.%fZ")
        # Footprint looks like "geography'SRID=...;POLYGON((...))'";
        # extract the quoted WKT payload.
        wkt = re.search(r"'(.*?)'", product['Footprint']).group(1)
        # First (and only expected) cloudCover attribute value.
        cloud = next(att["Value"] for att in product['Attributes']
                     if att['Name'] == "cloudCover")
        records.append((
            name,
            product['Id'],
            tile,
            cloud,
            product['Online'],
            product_date,
            wkt,
            datetime.now()))
        if product['Online']:  # only Online products can be downloaded now
            downloads.append([name, product_date, product['Id']])
    return records, downloads


def run_in_thread(name, daemon=True):
    """Decorator factory: run the wrapped callable in its own thread.

    Args:
        name: thread name (useful in logs/debuggers).
        daemon: if True the thread is a daemon and the wrapper returns None;
            otherwise the started Thread is returned so callers can join().
    """
    def decorator(func):
        @functools.wraps(func)  # preserve the wrapped function's metadata
        def wrapper(*args, **kwargs):
            thread = threading.Thread(
                target=func,
                args=args,
                kwargs=kwargs,
                name=name,
                daemon=daemon)
            thread.start()
            return None if daemon else thread
        return wrapper
    return decorator


class Initialization:
    """Prepare the local folder layout and ensure the product table exists."""

    def __init__(self, period, save_dir, cargs, table_name):
        self.create_folder(save_dir, period)
        if not self.checker(cargs, table_name):
            self.create_table(cargs, table_name)

    @staticmethod
    def checker(cargs, table_name):
        """Return True when *table_name* already exists in the database."""
        # Parameterized (the original interpolated table_name into the SQL).
        query = """
            SELECT EXISTS (
                SELECT 
                FROM information_schema.tables 
                WHERE table_name=%s);"""
        connection = psycopg2.connect(**cargs)
        connection.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)
        try:
            with connection.cursor() as cursor:
                cursor.execute(query, (table_name,))
                # fetchone()[0] is the EXISTS boolean; the original left the
                # result unbound when the query failed.
                exists = cursor.fetchone()[0]
        finally:
            connection.close()
        return exists

    @staticmethod
    def create_folder(save_dir, period):
        """Create one <save_dir>/<YYYYMM> directory per month in *period*.

        Args:
            save_dir: base directory.
            period: ("%Y-%m-%d", "%Y-%m-%d") start/end date strings.
        """
        start_date, end_date = period
        # Normalize to the first of the month: stepping from e.g. Jan 31
        # could otherwise skip the final month of the range.
        current = datetime.strptime(start_date, '%Y-%m-%d').replace(day=1)
        end = datetime.strptime(end_date, '%Y-%m-%d')
        while current <= end:
            os.makedirs(
                os.path.join(save_dir, current.strftime('%Y%m')),
                exist_ok=True)  # idempotent, race-free
            # Advance to the first day of the next month (stdlib only).
            if current.month == 12:
                current = current.replace(year=current.year + 1, month=1)
            else:
                current = current.replace(month=current.month + 1)
        return

    @staticmethod
    def create_table(cargs, table_name):
        """Create *table_name* with its column comments.

        ``table_name`` must come from trusted configuration: identifiers
        cannot be bound as query parameters, so it is interpolated.
        """
        log = logging.getLogger(__name__)  # module never defined a global `logger`
        log.debug(f"Table '{table_name}' does not exist in the database.")
        query = f'''
            CREATE TABLE {table_name} (
                name VARCHAR(255) PRIMARY KEY,
                id VARCHAR(255),
                tile VARCHAR(255),
                cloud NUMERIC(10, 2),
                online bool,
                product_date timestamp,
                geom public.geometry(polygon, 4326),
                zip_path text,
                tif_path text,
                create_date timestamp);
                COMMENT ON COLUMN {table_name}.name IS '产品名称';
                COMMENT ON COLUMN {table_name}.id IS '下载ID';
                COMMENT ON COLUMN {table_name}.tile IS '图幅号';
                COMMENT ON COLUMN {table_name}.cloud IS '含云量';
                COMMENT ON COLUMN {table_name}.online IS '产品是否在线';
                COMMENT ON COLUMN {table_name}.geom IS '有效边界范围';
                COMMENT ON COLUMN {table_name}.product_date IS '产品日期';
                COMMENT ON COLUMN {table_name}.zip_path IS '原始zip存储路径';
                COMMENT ON COLUMN {table_name}.tif_path IS '合成10波段栅格存储路径';
                COMMENT ON COLUMN {table_name}.create_date IS '该条记录更新时间';'''
        connection = psycopg2.connect(**cargs)
        connection.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)
        try:
            with connection.cursor() as cursor:
                # Let DDL failures propagate; the original swallowed them and
                # then logged that the table exists.
                cursor.execute(query)
        finally:
            connection.close()
        log.debug(f"Table '{table_name}' exists in the database.")
        return
 

class Downloader:
    """Synchronous Copernicus Data Space product downloader.

    Keeps a requests.Session configured with retries and proxies, refreshes
    the OAuth access token in a background daemon thread, and downloads
    product zips from a priority queue (see download_task).
    """

    interval = 590  # fallback seconds between token refreshes
    timeout = 60    # per-request timeout in seconds

    def __init__(self, account, proxies, attmpt):
        retries = Retry(
            total=attmpt,
            connect=5,
            read=5,
            backoff_factor=0,  # 0.5
            raise_on_status=True,
            # NOTE(review): 104 is not an HTTP status code (it is the
            # ECONNRESET errno) — confirm it is intentional here.
            status_forcelist=[104,500,502,503,504])
        adapter = HTTPAdapter(
            max_retries=retries,
            pool_connections=1,
            pool_maxsize=1)
        # Password-grant payload used to (re)acquire tokens.
        self.data = {
            'grant_type': 'password',
            'username': account['username'],
            'password': account['password'],
            'client_id': 'cdse-public'}
        self.session = requests.Session() 
        self.session.proxies.update(proxies)
        self.session.mount('http://', adapter)
        self.session.mount('https://', adapter)
        self.session.keep_alive = True
        # Start the background token-refresh loop (daemon thread).
        self.refresh_token(self.data, self.interval)

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        if exc_type is not None:
            print(f"Exception occurred: {exc_type}, {exc_value}")
        else:
            self.close()

    @run_in_thread(name="刷新token线程")
    def refresh_token(self, data, interval):
        """Obtain/refresh the access token, then re-schedule itself.

        Runs in a daemon thread. On success the next cycle uses the refresh
        grant after sleeping for the token lifetime; on failure it retries
        the password grant after 10 seconds. Each cycle spawns a new thread
        via the decorator.
        """
        try:  # try to obtain an access token and a refresh token
            with self.session.post(
                url="https://identity.dataspace.copernicus.eu/auth/realms/CDSE/protocol/openid-connect/token",
                headers={'Content-Type': 'application/x-www-form-urlencoded'},
                data=data,
                timeout=60) as response:
                if response.status_code == 200:  # accepted: store both tokens
                    self.__access_token = response.json()['access_token']
                    interval = response.json()['expires_in']
                    data = {
                            'grant_type': 'refresh_token',
                            'refresh_token': response.json()['refresh_token'],
                            'client_id': 'cdse-public'}
                else:  # rejected: fall back to the account credentials soon
                    print(response.status_code, response.json())
                    interval = 10
                    data = self.data
        except requests.RequestException as e:  # network error: fall back to credentials
            print(e)
            interval = 10
            data = self.data
        finally:  # schedule the next refresh with the updated payload
            time.sleep(interval)
            self.refresh_token(data, interval)
        return

    def __download_file(self, productid, zip_path):
        """Stream product *productid* into *zip_path*; return True on success.

        Writes to a '.incomplete' temp file under an exclusive lock; the
        write_to_file coroutine renames it to the final name on completion.
        If the lock is already held, waits for the other process to finish.
        """
        if not os.path.exists(zip_path):  # file not downloaded yet
            temp_path = zip_path.replace('.zip', '.incomplete')
            try:
                coroutine = write_to_file(temp_path, zip_path, 'wb')  # locked streaming writer
                next(coroutine)  # prime: opens and locks the temp file
                try:  # perform the download
                    with self.session.get(
                        url=f"https://zipper.dataspace.copernicus.eu/odata/v1/Products({productid})/$value",
                        headers={"Authorization": f"Bearer {self.__access_token}"},
                        timeout=self.timeout,
                        stream=True) as response:
                        if response.status_code == 200:  # stream chunks, then finalize
                            for chunk in response.iter_content(chunk_size=1024):
                                if chunk: coroutine.send(chunk)  # skip keep-alive chunks
                            coroutine.send(None)  # unlock + rename .incomplete -> .zip
                            status = True 
                        else:  # server refused: wait a minute, report failure
                            print(f"{response.status_code} {response.json()}")
                            time.sleep(60)
                            status = False 
                except requests.RequestException as e:  # network error: back off and retry
                    print(e)
                    time.sleep(60*10)
                    status = self.__download_file(productid, zip_path)
                finally:
                    coroutine.close()
            except Exception as e:  # lock held elsewhere: wait for that download to finish
                print(e)
                while not os.path.exists(zip_path):
                    time.sleep(60)
                status = True 
        else:  # file already present
            status = True 
        return status

    @run_in_thread(name="下载线程", daemon=False)
    def download_task(self, download_queue, coroutine, record_queue):
        """Consume (priority, (name, zip_path, productid)) items until a
        falsy element arrives; log each outcome through *coroutine* and push
        the resulting path (or None) to *record_queue*.
        """
        private_attribute = f'_{self.__class__.__name__}__access_token'
        # Wait until refresh_token() has obtained the first access token.
        while not hasattr(self, private_attribute): time.sleep(1)
        while True:
            priority, element = download_queue.get()
            if element:
                name, zip_path, productid = element
                status = self.__download_file(productid, zip_path)
                if status:  # file downloaded: record success
                    coroutine.send(f"{name} success {datetime.now()}")
                    record_queue.put(zip_path)
                else:  # download failed — was mislabelled "success", which made
                       # log-based skipping treat failed products as done
                    coroutine.send(f"{name} failed {datetime.now()}")
                    record_queue.put(None)
            else:
                download_queue.put((priority, element))  # re-queue sentinel for siblings
                break
        return

    def close(self):
        """Close the underlying HTTP session."""
        self.session.close()
        return


def build_params(tile, period, cloud):
    """Assemble the OData query parameters for a Sentinel-2 L1C search.

    Args:
        tile: MGRS tile identifier.
        period: (start, end) "%Y-%m-%d" date strings.
        cloud: maximum cloud cover (exclusive upper bound).

    Returns:
        dict of '$filter'/'$orderby'/'$count'/'$expand' query parameters.
    """
    start, end = period
    filter_clause = " and ".join([
        f"Attributes/OData.CSC.StringAttribute/any(att:att/Name eq 'tileId' and att/OData.CSC.StringAttribute/Value eq '{tile}')",
        f"Attributes/OData.CSC.StringAttribute/any(att:att/Name eq 'processingLevel' and att/OData.CSC.StringAttribute/Value eq 'S2MSI1C')",#S2MSI1C
        f"Attributes/OData.CSC.DoubleAttribute/any(att:att/Name eq 'cloudCover' and att/OData.CSC.DoubleAttribute/Value lt {cloud})",
        f"ContentDate/Start gt {start}T00:00:00.000Z",
        f"ContentDate/Start lt {end}T00:00:00.000Z"])
    return {
        '$filter': filter_clause,
        '$orderby': "ContentDate/Start desc",
        '$count': 'True',
        '$expand': 'Attributes'}


async def schedule(url, tiles, period, cloud, interval, sparams_queue):
    """Enqueue search parameters for every tile: the historical part of
    *period* once, then the recent part repeatedly every *interval* seconds.

    Terminates by enqueueing a None sentinel for the search worker(s).
    """
    historyp, currentp = split_periods(period)
    if historyp:  # historical window: query each tile once, immediately
        for tile in tqdm(tiles, disable=False, desc='history'):
            await sparams_queue.put((url, tile, historyp, cloud))
    if currentp:  # recent window: poll repeatedly until it closes
        start_date, end_date = currentp
        start_date = datetime.strptime(start_date, "%Y-%m-%d")
        end_date = datetime.strptime(end_date, "%Y-%m-%d")
        while start_date > datetime.now():
            await asyncio.sleep(60*60)  # window not open yet: re-check hourly
        while datetime.now() <= end_date + timedelta(days=5):  # keep polling 5 days past the end
            # Only re-query the trailing five days — older data is stable.
            if datetime.now() - timedelta(days=5) > start_date:
                sdate = datetime.now() - timedelta(days=5)
            else:
                sdate = start_date
            # Clamp the end to tomorrow so the filter always covers "today".
            if end_date > datetime.now():
                edate = datetime.now() + timedelta(days=1)
            else:
                edate = end_date
            _period = (sdate.strftime("%Y-%m-%d"), edate.strftime("%Y-%m-%d"))
            for tile in tqdm(tiles, disable=False, desc='current'):
                await sparams_queue.put((url, tile, _period, cloud))
            await asyncio.sleep(interval)
    # Sentinel must be None: search() tests `element is None`, and the
    # previous False sentinel crashed it on tuple unpacking.
    await sparams_queue.put(None)
    return


async def search(proxy, sparams_queue, product_queue):
    """Search worker: pull (url, tile, period, cloud) requests, query the
    Copernicus OData catalogue, and push (tile, products) pages downstream.

    Any falsy element terminates the worker; a None url (exhausted
    pagination) is skipped.
    """
    timeout = aiohttp.ClientTimeout(total=10)
    while True:
        element = await sparams_queue.get()
        # Accept any falsy sentinel: the scheduler historically enqueued
        # False, which the previous `element is None` check failed to
        # recognize before crashing on tuple unpacking.
        if not element: break
        url, tile, period, cloud = element
        if url == 'https://catalogue.dataspace.copernicus.eu/odata/v1/Products':
            params = build_params(tile, period, cloud)
        elif url is None:
            continue  # pagination chain for this tile/period is exhausted
        else:
            params = None  # url is an @odata.nextLink carrying its own query
        async with aiohttp.ClientSession() as session:
            async with session.get(url, params=params, proxy=proxy, timeout=timeout) as response:
                data = await response.json()
                if response.status == 200:
                    # Requeue the next page link (possibly None) and hand
                    # this page to the parser.
                    nextlink = data.get("@odata.nextLink")
                    await sparams_queue.put((nextlink, tile, period, cloud))
                    await product_queue.put((tile, (data["value"])))
                else:
                    # NOTE(review): the failed request is dropped, not
                    # retried — confirm that is acceptable.
                    print(response.status, data)
                    await asyncio.sleep(10)
    await product_queue.put(None)
    return


async def parser(cargs, product_queue, rough_queue):
    """Upsert product pages into sentinel2_l1c and forward online products
    (deduplicated by name) to the download hand-off queue.

    NOTE(review): table name is hard-coded here while the config carries a
    `table_name` — confirm they are meant to coincide.
    """
    register = set()  # product names already forwarded for download
    # Loop-invariant upsert statement, hoisted out of the consume loop.
    query = f"""
            INSERT INTO sentinel2_l1c
            (name,id,tile,cloud,online,product_date,geom,create_date) 
            VALUES ($1, $2, $3, $4, $5, $6, $7, $8)
            ON CONFLICT (name)
            DO UPDATE SET 
            online=excluded.online,
            id=excluded.id,
            tile=excluded.tile,
            cloud=excluded.cloud,
            product_date=excluded.product_date,
            geom=excluded.geom,
            create_date=excluded.create_date;
            """
    while True:
        element = await product_queue.get()
        if element is None: break
        tile, products = element
        records, downloads = analysis_product(tile, products)
        connection = await asyncpg.connect(**cargs)
        try:
            await connection.executemany(query, records)
        finally:
            # Release the connection even when the upsert fails (the
            # original leaked it on error).
            await connection.close()
        for item in downloads:
            if item[0] not in register:
                rough_queue.put(item)  # thread-safe queue.Queue consumed by checker()
                register.add(item[0])
    rough_queue.put(None)
    return


async def producer(tiles,period,cloud,count,proxy,interval,cargs,rough_queue):
    """Search-side pipeline: schedule -> search -> parser -> rough_queue."""
    url = 'https://catalogue.dataspace.copernicus.eu/odata/v1/Products'
    sem = asyncio.Semaphore(count)  # NOTE(review): unused — intended concurrency cap?
    sparams_queue = asyncio.Queue(maxsize=10000)
    product_queue = asyncio.Queue(maxsize=10000)
    workers = [asyncio.create_task(schedule(url,tiles,period,cloud,interval,sparams_queue))]  # async loading of search parameters
    # NOTE(review): `[task]*count` repeats ONE task object `count` times, so a
    # single search worker actually runs. Creating `count` distinct tasks would
    # require search() to re-queue the shutdown sentinel, otherwise the extra
    # workers never terminate — confirm the intended concurrency before changing.
    workers.extend([asyncio.create_task(search(proxy,sparams_queue,product_queue))]*count)  # concurrent catalogue queries
    workers.append(asyncio.create_task(parser(cargs,product_queue,rough_queue)))  # async DB upsert + download dispatch
    await asyncio.gather(*workers)
    return


async def refresher(data, proxy, access_token, start_event, stop_event):
    """Keep a Copernicus access token fresh until *stop_event* is set.

    Sets *start_event* once the first token is obtained so the download
    workers can start.

    NOTE(review): rebinding the local ``access_token`` does not propagate to
    the caller — workers keep whatever value they were given. A shared
    mutable holder is needed for refreshed tokens to take effect; confirm.
    """
    timeout = aiohttp.ClientTimeout(total=60)
    temp_data = data
    headers = {'Content-Type': 'application/x-www-form-urlencoded'}
    url = "https://identity.dataspace.copernicus.eu/auth/realms/CDSE/protocol/openid-connect/token"
    # Run UNTIL shutdown (the original `while stop_event.is_set()` never
    # entered the loop, so start_event was never set and downloads hung).
    while not stop_event.is_set():
        async with aiohttp.ClientSession() as session:
            # The token endpoint expects a POST of form data (matching the
            # synchronous Downloader.refresh_token).
            async with session.post(url, headers=headers, data=temp_data, proxy=proxy, timeout=timeout) as response:
                result = await response.json()
                if response.status == 200:
                    access_token = result['access_token']
                    if not start_event.is_set(): start_event.set()
                    interval = result['expires_in']
                    temp_data = {
                            'grant_type': 'refresh_token',
                            # was response.json()[...]: indexing an unawaited coroutine
                            'refresh_token': result['refresh_token'],
                            'client_id': 'cdse-public'}
                    # Refresh shortly before expiry (the computed lifetime
                    # was previously ignored in favor of a 2s spin).
                    await asyncio.sleep(max(interval - 60, 2))
                else:
                    # aiohttp exposes .status, not requests' .status_code.
                    print(response.status, result)
                    await asyncio.sleep(2)
                    temp_data = data  # fall back to the password grant
    return


async def downloader(access_token, proxy, dparams_queue, filepath_queue, coroutine):
    """Consume (priority, (productid, file_path)) items and stream each
    product zip to disk, reporting progress through the *coroutine* logger.

    A None element is the shutdown sentinel; it is re-queued so sibling
    workers also terminate.
    """
    timeout = aiohttp.ClientTimeout(total=10)
    while True:
        priority, element = await dparams_queue.get()
        print(element)
        if element is None: break
        productid, file_path = element
        # Product code = file name without the .zip extension
        # (was os.path.basneame — AttributeError).
        code = os.path.splitext(os.path.basename(file_path))[0]
        if not os.path.exists(file_path):
            print(access_token)
            async with aiohttp.ClientSession() as session:
                headers = {"Authorization": f"Bearer {access_token}"}
                url = f"https://zipper.dataspace.copernicus.eu/odata/v1/Products({productid})/$value"
                # aiohttp streams responses by default; requests-style
                # `stream=True` is not a valid kwarg and raised TypeError.
                # NOTE(review): response.status is never checked — an error
                # body would be written to the zip; confirm intended handling.
                async with session.get(url=url, headers=headers, proxy=proxy, timeout=timeout) as response:
                    temp_path = f"{file_path}.incomplete"
                    try:
                        async with aiofiles.open(temp_path, 'wb') as file_handle:
                            # NOTE(review): flock on an aiofiles wrapper relies
                            # on it proxying fileno() — confirm.
                            fcntl.flock(file_handle, fcntl.LOCK_EX | fcntl.LOCK_NB)
                            while True:
                                chunk = await response.content.read(1024)
                                if not chunk: break
                                await file_handle.write(chunk)
                                await file_handle.flush()
                            # rename() is a plain coroutine, not a context
                            # manager — it must be awaited.
                            await aiofiles.os.rename(temp_path, file_path)
                            fcntl.flock(file_handle, fcntl.LOCK_UN)
                            # Enqueue the finished file's path (the original
                            # enqueued the queue object itself).
                            await filepath_queue.put(file_path)
                            coroutine.send(f"{code} success {datetime.now()}")
                    except BlockingIOError:
                        # Another worker holds the lock; back off and retry.
                        await asyncio.sleep(2)
                        await dparams_queue.put((priority, element))
        else:
            coroutine.send(f"{code} success {datetime.now()}")
            await filepath_queue.put(file_path)
    await dparams_queue.put((priority, element))  # propagate the sentinel to siblings
    coroutine.send(None)
    return


async def multiasync(access_token, proxy, dparams_queue, filepath_queue, coroutine, stop_event, start_event):
    """Run four download workers once the token refresher signals readiness,
    then set *stop_event* when they all finish."""
    await start_event.wait()  # wait until the first access token is available
    # Create four DISTINCT tasks: the original `[create_task(...)] * 4`
    # repeated one task object, so only a single worker ran. Termination is
    # safe because downloader() re-queues the shutdown sentinel.
    workers = [
        asyncio.create_task(
            downloader(access_token, proxy, dparams_queue, filepath_queue, coroutine))
        for _ in range(4)]
    await asyncio.gather(*workers)
    stop_event.set()  # signal the refresher loop to shut down
    return


async def checker(file_path, save_dir, rough_queue, dparams_queue, coroutine):
    """Route discovered products into the download queue, skipping products
    already logged as successful or already present on disk.

    *rough_queue* is a thread-safe queue.Queue fed by the producer thread;
    *dparams_queue* is the asyncio priority queue the downloaders consume.
    """
    exist_codes = set()
    async with aiofiles.open(file_path, mode='r') as file_handle:
        # Scan the whole task log — the original read only the first line,
        # so all but one previous success was forgotten.
        async for line in file_handle:
            if "success" in line:
                exist_codes.add(line.split(" ")[0])

    loop = asyncio.get_event_loop()
    while True:
        # A blocking rough_queue.get() would stall the whole event loop
        # (and every download); wait for it in the default executor instead.
        element = await loop.run_in_executor(None, rough_queue.get)
        if element is None: break
        code, date, pdid = element
        path = os.path.join(save_dir, date.strftime("%Y%m"), f"{code}.zip")
        if code in exist_codes:
            continue  # already downloaded in a previous run
        if not os.path.exists(path):
            # Newer acquisitions first: negate YYYYMMDD so the priority
            # queue pops the most recent date first.
            priority = -int(date.strftime("%Y%m%d"))
            await dparams_queue.put((priority, (pdid, path)))
        else:
            coroutine.send(f"{code} success {datetime.now()}")
    # Priority 0 sorts after every (negative) real priority, so the sentinel
    # is delivered only after all queued downloads. The original `put` was
    # not awaited and referenced a possibly-unbound `priority`.
    await dparams_queue.put((0, None))
    return


def writer(task_id):
    """Generator coroutine that appends sent lines to ``<task_id>.log``.

    Prime with ``next()``; each ``send(str)`` appends one flushed line;
    ``send(None)`` closes the log and finishes the generator.
    """
    log_path = f"{task_id}.log"
    with open(log_path, mode='a+') as handle:
        while True:
            entry = yield
            if entry is None:
                break
            handle.write(entry + "\n")
            handle.flush()  # keep the log durable between sends
    return


async def writer_(file_path, write_queue, stop_event):
    """Async log writer: append queued lines to *file_path* until a None
    element arrives or *stop_event* is set.

    NOTE(review): currently unused — its create_task call in consumer() is
    commented out; the synchronous writer() generator is used instead.
    """
    while not stop_event.is_set():
        element = await write_queue.get()
        if element is None: break
        # Reopen per message so partially written logs survive a crash.
        async with aiofiles.open(file_path, mode='a+') as file_handle:
            await file_handle.write(element + "\n")
            await file_handle.flush()
    return


async def consumer(task_id, account, save_dir, proxy, count, rough_queue):
    """Download-side pipeline: token refresher + product checker + workers.

    NOTE(review): `count` and `write_queue` are currently unused — confirm
    whether the worker count (hard-coded to 4 in multiasync) and the
    disabled writer_ task should use them.
    """
    stop_event = asyncio.Event()
    start_event = asyncio.Event()
    dparams_queue = asyncio.PriorityQueue()
    write_queue = asyncio.Queue(maxsize=10000)
    filepath_queue = asyncio.Queue(maxsize=10000)
    # NOTE(review): refresher rebinding this local does not propagate —
    # workers keep the value passed here; confirm design.
    access_token = None
    # Progress log shared with checker(): one "<code> success <ts>" line per
    # completed download. The original referenced `file_path` and
    # `coroutine` without ever defining them (NameError at runtime).
    file_path = f"{task_id}.log"
    coroutine = writer(task_id)
    next(coroutine)  # prime the generator so it can accept send()
    data = {
        'grant_type': 'password',
        'username': account['username'],
        'password': account['password'],
        'client_id': 'cdse-public'}
    workers = [asyncio.create_task(multiasync(access_token, proxy, dparams_queue, filepath_queue, coroutine, stop_event, start_event))]
    workers.append(asyncio.create_task(checker(file_path, save_dir, rough_queue, dparams_queue, coroutine)))
    workers.append(asyncio.create_task(refresher(data, proxy, access_token, start_event, stop_event)))
    await asyncio.gather(*workers)
    return


@run_in_thread(name="线程",daemon=False)
def run_async(func, args):
    """Run the coroutine produced by func(*args) to completion on a fresh
    event loop. Executes inside its own thread; the call returns that
    Thread (daemon=False) so the caller may join() it.
    """
    coro = func(*args)
    asyncio.run(coro)


def main(task_id,qhdm,period,account_index):
    """Entry point: search Sentinel-2 L1C products covering district *qhdm*
    over *period* and download them with the account at *account_index*.

    NOTE(review): several config values are overridden by hard-coded dev
    values below (proxy, interval) — confirm before deployment.
    """
    with open('./config.json',"r") as file:
        params = json.load(file)
        proxy = params["proxy"]
        accounts = params["accounts"]
        cargs = params["cargs"]
        save_dir = params["save_dir"]
        table_name = params["table_name"]   # NOTE(review): unused — parser() hard-codes the table name
        interval = params['interval']
        ssh_args = params['ssh_args']       # NOTE(review): unused in this function
        remote_dir = params['remote_dir']   # NOTE(review): unused in this function
    
    count = 4
    cloud = 100           # maximum cloud cover (percent) for the OData filter
    interval = 8*60*60    # NOTE(review): overrides the config 'interval' read above
    tiles = qhdm_2_tiles(cargs,qhdm)
    account = accounts[account_index]
    
    write_coroutine = writer(task_id)
    next(write_coroutine)  # prime the log-writer generator
    
    # proxy = {
    #     'http': f"{proxy['ip']}:{proxy['port']}",
    #     'https': f"{proxy['ip']}:{proxy['port']}"}
    
    proxy = "http://192.168.2.172:10809"  # NOTE(review): hard-coded proxy overrides the config value
    # search queue
    # sparams_queue = queue.Queue(maxsize=10000) 
    # product hand-off queue (producer thread -> consumer thread)
    rough_queue = queue.Queue(maxsize=10000)

    # producer: search the catalogue and record products in the database
    args = (tiles,period,cloud,count,proxy,interval,cargs,rough_queue)
    producer_thread = run_async(producer,args)
    
    # consumer: download the products discovered by the producer
    args = (task_id,account,save_dir,proxy,count,rough_queue)
    consumer_thread = run_async(consumer,args)


    producer_thread.join()
    consumer_thread.join()
    write_coroutine.close()
    return



if __name__ == "__main__":
    # Manual smoke-test invocation.
    task_id = 'test1234'
    qhdm = '41'  # administrative-division code
    period = ['2024-02-20', '2024-03-09']
    account_index = 0
    main(task_id, qhdm, period, account_index)

    
    



    
  


