#!/data/ygsfb/virtualEnvironment/sentinel2/bin/python
from psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT
from psycopg2.extras import execute_values
from requests.adapters import HTTPAdapter
from requests.packages.urllib3.util.retry import Retry
from datetime import datetime,timedelta
from dateutil.relativedelta import relativedelta
from tqdm import tqdm
import daemonize
import threading
import requests
import psycopg2
import paramiko
import logging
import atexit
import socket
import queue
import sched
import time
import json
import os
import re


class Initialization:
    """One-shot setup for a harvesting run: creates the per-month download
    folders and, when absent, the product-metadata table."""

    def __init__(self, period, save_dir, cargs, table_name):
        """
        period: (start, end) dates as 'YYYY-MM-DD' strings.
        save_dir: root directory under which YYYYMM sub-folders are created.
        cargs: keyword arguments for psycopg2.connect().
        table_name: name of the metadata table to create when missing.
        """
        self.create_folder(save_dir, period)
        if not self.checker(cargs, table_name):
            self.create_table(cargs, table_name)

    @staticmethod
    def checker(cargs, table_name):
        """Return True when `table_name` already exists in the database.

        Returns False when the existence query itself fails (the original
        raised NameError on `check_table_result` in that case).
        """
        # table_name is bound as a query parameter; the original
        # interpolated it into the SQL string (injection-prone).
        query = """
            SELECT EXISTS (
                SELECT
                FROM information_schema.tables
                WHERE table_name = %s);"""
        connection = psycopg2.connect(**cargs)
        connection.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)
        exists = False
        try:
            cursor = connection.cursor()
            try:
                cursor.execute(query, (table_name,))
                exists = cursor.fetchone()[0]
            except Exception:
                connection.rollback()
                logging.getLogger(__name__).exception(
                    "table existence check failed")
            finally:
                cursor.close()
        finally:
            connection.close()  # always released, even on failure
        return exists

    @staticmethod
    def create_folder(save_dir, period):
        """Create one save_dir/YYYYMM folder per calendar month covered by
        `period`, inclusive of both endpoint months.

        The original stepped with relativedelta from the start *day*, which
        skipped the end date's month whenever start.day > end.day (e.g.
        2024-01-31..2024-02-01 never created 202402, so saving a February
        product would fail).
        """
        start_date, end_date = period
        cursor = datetime.strptime(start_date, '%Y-%m-%d').replace(day=1)
        last = datetime.strptime(end_date, '%Y-%m-%d').replace(day=1)
        while cursor <= last:
            # exist_ok avoids the check-then-create race of the original
            os.makedirs(os.path.join(save_dir, cursor.strftime('%Y%m')),
                        exist_ok=True)
            # advance exactly one calendar month (stdlib only)
            if cursor.month == 12:
                cursor = cursor.replace(year=cursor.year + 1, month=1)
            else:
                cursor = cursor.replace(month=cursor.month + 1)
        return

    @staticmethod
    def create_table(cargs, table_name):
        """Create the product-metadata table with its column comments.

        table_name comes from local config (not user input); DDL identifiers
        cannot be bound as parameters, so the f-string is tolerated here.
        """
        log = logging.getLogger(__name__)
        log.debug(f"Table '{table_name}' does not exist in the database.")
        query = f'''
            CREATE TABLE {table_name} (
                name VARCHAR(255) PRIMARY KEY,
                id VARCHAR(255),
                tile VARCHAR(255),
                cloud NUMERIC(10, 2),
                online bool,
                product_date timestamp,
                geom public.geometry(polygon, 4326),
                zip_path text,
                tif_path text,
                create_date timestamp);
                COMMENT ON COLUMN {table_name}.name IS '产品名称';
                COMMENT ON COLUMN {table_name}.id IS '下载ID';
                COMMENT ON COLUMN {table_name}.tile IS '图幅号';
                COMMENT ON COLUMN {table_name}.cloud IS '含云量';
                COMMENT ON COLUMN {table_name}.online IS '产品是否在线';
                COMMENT ON COLUMN {table_name}.geom IS '有效边界范围';
                COMMENT ON COLUMN {table_name}.product_date IS '产品日期';
                COMMENT ON COLUMN {table_name}.zip_path IS '原始zip存储路径';
                COMMENT ON COLUMN {table_name}.tif_path IS '合成10波段栅格存储路径';
                COMMENT ON COLUMN {table_name}.create_date IS '该条记录更新时间';'''
        connection = psycopg2.connect(**cargs)
        connection.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)
        try:
            cursor = connection.cursor()
            try:
                # autocommit is active, so no explicit commit is needed
                cursor.execute(query)
            except Exception:
                connection.rollback()
                log.exception(f"failed to create table '{table_name}'")
            finally:
                cursor.close()
        finally:
            connection.close()
        log.debug(f"Table '{table_name}' exists in the database.")
        return
 
 
class Searcher:
    """Queries the Copernicus Data Space OData catalogue for Sentinel-2 L1C
    products covering an administrative region, mirrors the metadata into
    PostGIS, and feeds online products into a shared download queue.

    Notes:
    - PriorityQueue treats smaller numbers as higher priority, so product
      dates are negated on enqueue: newest products download first.
    - `seen_set` is a class attribute shared by all instances; a product
      name enqueued once is never enqueued again. Names (strings) are used
      as the set elements because lists are unhashable.
    """

    url = "https://catalogue.dataspace.copernicus.eu/odata/v1/Products"
    attempt = 10             # download retries granted to each product
    interval = 60 * 60 * 8   # re-query period for the recent window (8 h)
    seen_set = set()         # product names already pushed to the queue

    def __init__(self, qhdm, period, savedir, cargs, proxy, cloud=100.0):
        """
        qhdm: administrative-division code used to select tiles.
        period: (start, end) 'YYYY-MM-DD' strings.
        savedir: root folder holding the YYYYMM download folders.
        cargs: psycopg2 connection keyword arguments.
        proxy: {'ip': ..., 'port': ...} HTTP(S) proxy.
        cloud: maximum accepted cloud-cover percentage.
        """
        self.qhdm = qhdm
        self.savedir = savedir
        self.cloud = cloud
        self.cargs = cargs
        self.history_period, self.current_period = self.split_periods(period)
        self.session = requests.Session()
        self.session.proxies.update({
            'http': f"{proxy['ip']}:{proxy['port']}",
            'https': f"{proxy['ip']}:{proxy['port']}"})

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        if exc_type is not None:
            print(f"Exception occurred: {exc_type}, {exc_value}")
        # Always release the HTTP session; the original leaked it whenever
        # the with-block exited with an exception.
        self.close()

    @staticmethod
    def qhdm_2_tiles(cargs, qhdm):
        """Return the distinct Sentinel-2 tile ids whose footprint intersects
        the administrative division `qhdm` (spatial join in PostGIS)."""
        # qhdm is bound as a parameter; the original used an f-string,
        # which is SQL-injection-prone.
        query = """
                SELECT distinct china_tile.tile
                FROM china_tile
                JOIN dt_sy
                on st_intersects(china_tile.geom,dt_sy.shape)
                WHERE dt_sy.qhdm = %s
                """
        connection = psycopg2.connect(**cargs)
        connection.autocommit = True
        rows = []  # default avoids NameError when the query fails
        try:
            cursor = connection.cursor()
            try:
                cursor.execute(query, (qhdm,))
                rows = cursor.fetchall()
            except Exception:
                connection.rollback()
                logging.getLogger(__name__).exception("tile lookup failed")
            finally:
                cursor.close()
        finally:
            connection.close()
        return [row[0] for row in rows]

    @staticmethod
    def split_periods(period):
        """Split `period` at (now - 5 days) into history/current halves.

        History data is queried once; data newer than five days may still be
        (re)published, so the "current" half is polled periodically.
        Either half is None when the period lies entirely on one side of the
        split date.

        Raises ValueError for an unclassifiable period (the original fell
        through and crashed with UnboundLocalError).
        """
        start_date, end_date = period
        start_date = datetime.strptime(start_date, "%Y-%m-%d")
        end_date = datetime.strptime(end_date, "%Y-%m-%d")
        split_date = datetime.now() - timedelta(days=5)
        if start_date <= split_date <= end_date:
            history_period = (
                start_date.strftime("%Y-%m-%d"), split_date.strftime("%Y-%m-%d"))
            current_period = (
                split_date.strftime("%Y-%m-%d"), end_date.strftime("%Y-%m-%d"))
        elif end_date < split_date:
            history_period = (
                start_date.strftime("%Y-%m-%d"), split_date.strftime("%Y-%m-%d"))
            current_period = None
        elif split_date < start_date:
            history_period = None
            current_period = (
                split_date.strftime("%Y-%m-%d"), end_date.strftime("%Y-%m-%d"))
        else:
            raise ValueError(f"invalid period: {period}")
        return history_period, current_period

    @staticmethod
    def __split_update(tile, products):
        """Flatten OData product dicts into row tuples for the upsert."""
        data_update = []
        for product in products:
            product_date = datetime.strptime(
                product['ContentDate']['Start'], "%Y-%m-%dT%H:%M:%S.%fZ")
            # Footprint looks like "geography'SRID=...;POLYGON((...))'";
            # extract the quoted WKT part.
            wkt = re.search(r"'(.*?)'", product['Footprint']).group(1)
            cloud = list(filter(
                lambda x: x['Name'] == "cloudCover",
                product['Attributes']))[0]["Value"]
            data_update.append((
                product['Name'].replace(".SAFE", ""),
                product['Id'],
                tile,
                cloud,
                product['Online'],
                product_date,
                wkt,
                datetime.now()))
        return data_update

    def __add_queue(self, download_queue, products):
        """Enqueue every *online* product not seen before, newest first."""
        for product in products:
            if product['Online']:  # only data that can be downloaded now
                product_date = datetime.strptime(
                    product['ContentDate']['Start'], "%Y-%m-%dT%H:%M:%S.%fZ")
                name = product['Name'].replace(".SAFE", "")
                zip_path = os.path.join(
                    self.savedir, product_date.strftime("%Y%m"), f"{name}.zip")
                record = (
                    name,
                    zip_path,
                    product['Id'],
                    self.attempt)
                if name not in self.seen_set:  # dedupe across all searches
                    self.seen_set.add(name)
                    # negate so newer dates get higher queue priority
                    priority = int(product_date.strftime("%Y%m%d"))
                    download_queue.put((-priority, record))
        return

    def __searcher_link(self, url, params=None):
        """Fetch one catalogue page, following @odata.nextLink recursively.

        Returns the accumulated product list; best-effort — returns [] when
        the request fails.
        """
        products = []
        try:
            with self.session.get(url, params=params) as response:
                result = response.json()
                if response.status_code == 200:
                    if "@odata.nextLink" in result.keys():
                        nextlink = result["@odata.nextLink"]
                        products.extend(self.__searcher_link(nextlink))
                    products.extend(result["value"])
                else:
                    print(response.status_code, response.json())
        except Exception as e:
            # Narrowed from a bare `except:` (which also swallowed
            # KeyboardInterrupt). Most likely a network/VPN problem.
            print("网络或VPN问题！")
            logging.getLogger(__name__).debug("catalogue request failed: %s", e)
        return products

    def __searcher_tile(self, tile, period):
        """Query the catalogue for one tile over `period`, newest first."""
        start, end = period
        attributes = [
            f"Attributes/OData.CSC.StringAttribute/any(att:att/Name eq 'tileId' and att/OData.CSC.StringAttribute/Value eq '{tile}')",
            f"Attributes/OData.CSC.StringAttribute/any(att:att/Name eq 'processingLevel' and att/OData.CSC.StringAttribute/Value eq 'S2MSI1C')",#S2MSI1C
            f"Attributes/OData.CSC.DoubleAttribute/any(att:att/Name eq 'cloudCover' and att/OData.CSC.DoubleAttribute/Value lt {self.cloud})",
            f"ContentDate/Start gt {start}T00:00:00.000Z",
            f"ContentDate/Start lt {end}T00:00:00.000Z"]
        payload = {
            '$filter': " and ".join(attributes),
            '$orderby': "ContentDate/Start desc",
            '$count': 'True',
            '$expand': 'Attributes'}  # Assets Attributes
        products = self.__searcher_link(self.url, payload)
        return products

    @staticmethod
    def __updata_table(cargs, tile, products):
        """Upsert product metadata into the sentinel2_l1c table.

        NOTE(review): the table name is hard-coded here while the rest of
        the pipeline reads it from config — confirm they always match.
        """
        data = Searcher.__split_update(tile, products)
        connection = psycopg2.connect(**cargs)
        # autocommit alone suffices; the original's extra
        # set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT) was redundant.
        connection.autocommit = True
        query = """
            INSERT INTO sentinel2_l1c
            (name,id,tile,cloud,online,product_date,geom,create_date) 
            VALUES %s
            ON CONFLICT (name)
            DO UPDATE SET 
            online=excluded.online,
            id=excluded.id,
            tile=excluded.tile,
            cloud=excluded.cloud,
            product_date=excluded.product_date,
            geom=excluded.geom,
            create_date=excluded.create_date;
            """
        try:
            cursor = connection.cursor()
            try:
                execute_values(cursor, query, data)
            finally:
                cursor.close()
        finally:
            connection.close()  # always released, even on failure
        return

    def search_update_load(self, period, download_queue, desc):
        """For every tile of the region: query the catalogue, upsert the
        metadata, and enqueue downloadable products."""
        tiles = self.qhdm_2_tiles(self.cargs, self.qhdm)
        for tile in tqdm(tiles, disable=False, desc=desc):
            products = self.__searcher_tile(tile, period)
            self.__updata_table(self.cargs, tile, products)
            self.__add_queue(download_queue, products)
        return

    def execute(self, download_queue):
        """Run the full search: the historic window once, then poll the
        recent window every `interval` seconds until it has aged out."""
        if self.history_period:  # historic half: query/upsert/enqueue once
            self.search_update_load(self.history_period, download_queue, 'history')
        if self.current_period:  # recent half: poll periodically
            start_date, end_date = self.current_period
            start_date = datetime.strptime(start_date, "%Y-%m-%d")
            end_date = datetime.strptime(end_date, "%Y-%m-%d")
            while start_date > datetime.now():
                time.sleep(60 * 60)  # hourly wait until the window opens
            while datetime.now() <= end_date + timedelta(days=5):
                # Clamp the query window to [now-5d, now+1d] as time passes.
                if datetime.now() - timedelta(days=5) > start_date:
                    sdate = datetime.now() - timedelta(days=5)
                else:
                    sdate = start_date
                if end_date > datetime.now():
                    edate = datetime.now() + timedelta(days=1)
                else:
                    edate = end_date
                period = (sdate.strftime("%Y-%m-%d"), edate.strftime("%Y-%m-%d"))
                self.search_update_load(period, download_queue, 'current')
                time.sleep(self.interval)  # 8-hour cycle
        return

    def close(self):
        """Release the HTTP session."""
        self.session.close()
        return


class Downloader:
    """Multi-threaded downloader for Copernicus Sentinel-2 zip products.

    Thread layout (see pool()): a daemon thread keeps the OAuth access token
    fresh, a daemon thread serializes progress lines to `<task_id>.log`,
    one thread filters out already-downloaded products, and four worker
    threads stream the zip files.
    """

    # Retry policy shared by the HTTP session's adapters.
    retries = Retry(
        total=5,
        connect=5,
        read=5,
        backoff_factor=0,  # 0.5
        raise_on_status=True,
        status_forcelist=[104, 500, 502, 503, 504])
    adapter = HTTPAdapter(
        max_retries=retries,
        pool_connections=1,
        pool_maxsize=1)
    tokenurl = "https://identity.dataspace.copernicus.eu/auth/realms/CDSE/protocol/openid-connect/token"
    tokenheaders = {'Content-Type': 'application/x-www-form-urlencoded'}
    interval = 600  # seconds; not read here — presumably for config symmetry
    timeout = 60    # per-request timeout in seconds

    min_priority = None
    # Class-level queues shared by the worker threads.
    download_queue = queue.PriorityQueue(maxsize=10000)
    is_exist_queue = queue.Queue(maxsize=10000)

    def __init__(self, task_id, save_dir, account, proxy):
        """
        task_id: names the progress log file `<task_id>.log`.
        save_dir: root folder for downloaded zips.
        account: {'username': ..., 'password': ...} CDSE credentials.
        proxy: {'ip': ..., 'port': ...} HTTP(S) proxy.
        """
        self.save_dir = save_dir
        self.account = account
        self.log_file = f"{task_id}.log"
        self.proxies = {
            'http': f"{proxy['ip']}:{proxy['port']}",
            'https': f"{proxy['ip']}:{proxy['port']}"}
        self.session = requests.Session()
        self.session.proxies.update(self.proxies)
        self.session.headers.update(self.tokenheaders)
        self.session.mount('http://', self.adapter)
        self.session.mount('https://', self.adapter)
        self.session.keep_alive = True

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        if exc_type is not None:
            print(f"Exception occurred: {exc_type}, {exc_value}")
        # Always release resources; the original leaked the session whenever
        # the with-block exited with an exception.
        self.close()

    @property
    def exist_names(self):
        """Product names already recorded as successfully downloaded in the
        progress log."""
        exist_names = set()
        if os.path.exists(self.log_file):
            with open(self.log_file, 'r') as file:
                for line in file:  # stream instead of materializing readlines()
                    if "success" in line:
                        exist_names.add(line.split(" ")[0])
        return exist_names

    def __write_log(self):
        """Daemon loop: drain `is_exist_queue` and append each line to the
        progress log, flushing so records survive a crash."""
        with open(self.log_file, 'a+') as file:
            while True:
                line = self.is_exist_queue.get()
                file.write(line + '\n')
                file.flush()
                self.is_exist_queue.task_done()
        return

    def __refresh_token(self):
        """Daemon loop keeping `self.__access_token` valid.

        Starts with a password grant, then switches to refresh-token grants;
        any failure falls back to the password grant. The original crashed on
        a non-200 response (unbound `refresh_token`, nonexistent
        `self.logger`) and ignored `expires_in`, re-requesting every 2 s.
        """
        log = logging.getLogger(__name__)
        password_grant = {
            'grant_type': 'password',
            'username': self.account['username'],
            'password': self.account['password'],
            'client_id': 'cdse-public'}
        data = password_grant
        while True:
            wait = 2  # short retry delay when no token was obtained
            try:
                with self.session.post(
                        url=self.tokenurl, data=data, timeout=60) as response:
                    if response.status_code == 200:
                        payload = response.json()
                        self.__access_token = payload['access_token']
                        # refresh one minute before the token expires
                        wait = max(payload['expires_in'] - 60, 2)
                        data = {
                            'grant_type': 'refresh_token',
                            'refresh_token': payload['refresh_token'],
                            'client_id': 'cdse-public'}
                    else:
                        log.debug("token request failed: %s", response.json())
                        data = password_grant
            except Exception:
                log.exception("token refresh error")
                data = password_grant
            time.sleep(wait)
        return

    def __download_file(self, productid, zip_path):
        """Stream one product zip to disk.

        Writes to `<name>.incomplete` and renames on completion so a partial
        download is never mistaken for a finished file. Returns True on
        success, False otherwise.
        """
        try:
            with self.session.get(
                url=f"https://zipper.dataspace.copernicus.eu/odata/v1/Products({productid})/$value",
                headers={"Authorization": f"Bearer {self.__access_token}"},
                timeout=self.timeout,
                stream=True) as response:
                if response.status_code == 200:
                    with open(zip_path.replace('.zip', '.incomplete'), "wb") as zipfile:
                        for chunk in response.iter_content(chunk_size=1024):
                            if chunk:
                                zipfile.write(chunk)
                                zipfile.flush()  # persist data as it arrives
                    os.rename(zip_path.replace('.zip', '.incomplete'), zip_path)
                    status = True
                else:  # download failed with an HTTP error
                    print(response.status_code, response.json())
                    status = False
        except Exception as e:  # network/IO error — report failure
            print(e)
            status = False
        return status

    def __check_file_exist(self, search_queue, exist_names):
        """Filter loop: forward products to the download queue unless they
        are already logged as downloaded or already present on disk."""
        while True:
            priority, element = search_queue.get()
            name, zip_path, productid, attempt = element
            if name in exist_names:
                continue  # already recorded in the progress log
            if os.path.exists(zip_path):
                # on disk but missing from the log: record it as success
                self.is_exist_queue.put(f"{name} success {datetime.now()}")
            else:
                self.download_queue.put((priority, element))
        return

    def __execute(self, in_event):
        """Worker loop: download queued products, re-queueing failures until
        their retry budget (`attempt`) is exhausted."""
        while True:
            priority, element = self.download_queue.get()
            name, zip_path, productid, attempt = element
            status = self.__download_file(productid, zip_path)
            if status:
                self.is_exist_queue.put(f"{name} success {datetime.now()}")
            elif attempt != 0:
                # NOTE(review): the original re-queued `element` with the
                # ORIGINAL attempt count; re-pack with the decremented one.
                self.download_queue.put(
                    (priority, (name, zip_path, productid, attempt - 1)))
            else:  # retry budget exhausted — record the failure
                self.is_exist_queue.put(f"{name} failure {datetime.now()}")
        return

    def pool(self, in_event, search_queue):
        """Start all worker threads and block on the non-daemon ones.

        NOTE(review): the filter and download loops never exit, so the
        join() calls below block indefinitely — this is a long-running
        service entry point.
        """
        refresh_token_thread = threading.Thread(
            target=self.__refresh_token,
            name="刷新token线程",
            daemon=True)
        refresh_token_thread.start()
        # Wait until the first access token exists (name-mangled attribute).
        private_attribute = f'_{self.__class__.__name__}__access_token'
        while not hasattr(self, private_attribute):
            time.sleep(1)

        write_thread = threading.Thread(
            target=self.__write_log,
            name="日志更新记录线程",
            daemon=True)
        write_thread.start()

        update_log_task_thread = threading.Thread(
            target=self.__check_file_exist,
            name='更新日志与任务队列线程',
            args=(search_queue, self.exist_names),
            daemon=False)
        update_log_task_thread.start()

        download_threads = []
        for i in range(4):
            t = threading.Thread(
                target=self.__execute,
                args=(in_event,),
                daemon=False,
                name=f"下载线程:{i}")
            download_threads.append(t)
        for t in download_threads:
            t.start()
        for t in download_threads:
            t.join()

        update_log_task_thread.join()
        return

    def close(self):
        """Release the HTTP session and wait for pending log lines to be
        flushed (the original referenced a nonexistent `self.queue_info`,
        which raised AttributeError)."""
        self.session.close()
        self.is_exist_queue.join()
        return


def search(qhdm, period, savedir, cargs, proxy, download_queue):
    """Thread entry point: build a Searcher for the region/period and feed
    every discovered product into `download_queue`."""
    with Searcher(qhdm, period, savedir, cargs, proxy) as finder:
        finder.execute(download_queue)
    return


def sender(ssh_args, remote_dir, in_event, in_queue):
    """Thread entry point: upload finished zips from `in_queue` to
    `remote_dir` over SFTP until `in_event` is set and the queue is drained.

    ssh_args: positional arguments for paramiko SSHClient.connect().
    remote_dir: remote base directory; files keep their YYYYMM/name layout.
    in_event: stop signal; checked together with queue emptiness.
    in_queue: PriorityQueue of (priority, local_path) tuples.
    """
    log = logging.getLogger(__name__)
    ssh = paramiko.SSHClient()
    ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    ssh.connect(*ssh_args)
    try:
        sftp = ssh.open_sftp()
        try:
            while not (in_event.is_set() and in_queue.empty()):
                try:
                    # Timed get so the stop condition is re-checked even when
                    # the queue stays empty after in_event is set (the
                    # original blocked forever on a bare get()).
                    priority, local_path = in_queue.get(timeout=5)
                except queue.Empty:
                    continue
                if not os.path.exists(local_path):
                    log.debug(f"{local_path}下载失败")
                    continue
                # keep the trailing "YYYYMM/name.zip" layout on the remote
                file_name = "/".join(local_path.split('/')[-2:])
                remote_path = os.path.join(remote_dir, file_name)
                try:
                    sftp.stat(remote_path)  # skip files already uploaded
                except FileNotFoundError:
                    # upload under a temp name, then rename atomically so a
                    # partial transfer is never mistaken for a complete file
                    temp_path = remote_path.replace('.zip', '.incomplete')
                    sftp.put(local_path, temp_path, confirm=True)
                    sftp.rename(temp_path, remote_path)
        finally:
            sftp.close()
    finally:
        ssh.close()  # always released, even on exceptions (original leaked)
    log.debug("发送进程结束！")
    return
 
 
def main(task_id, qhdm, period, account_index):
    """Run a single search-and-download task using ./config.json settings.

    task_id: names the progress log file.
    qhdm: administrative-division code selecting the tiles.
    period: [start, end] 'YYYY-MM-DD' strings.
    account_index: which entry of config's "accounts" list to use.

    NOTE(review): Downloader.pool() blocks indefinitely (its worker loops
    never exit), so control normally never reaches search_thread.join().
    """
    with open('./config.json', "r") as file:
        params = json.load(file)
    # Only the keys this entry point actually uses (the original also read
    # interval/ssh_args/remote_dir into unused locals).
    proxy = params["proxy"]
    cargs = params["cargs"]
    save_dir = params["save_dir"]
    table_name = params["table_name"]
    account = params["accounts"][account_index]

    # One-time setup: month folders + metadata table.
    Initialization(period, save_dir, cargs, table_name)
    # Queue carrying discovered products to the downloader.
    download_queue = queue.PriorityQueue(maxsize=10000)
    # Event passed to the download workers (currently unused by them).
    download_event = threading.Event()

    # Catalogue-search thread feeding the queue.
    search_thread = threading.Thread(
        target=search,
        args=(qhdm, period, save_dir, cargs, proxy, download_queue),
        name='查询数据线程',
        daemon=False)  # constructor arg: setDaemon() is deprecated (removed in 3.13)
    search_thread.start()

    # Download workers consume the queue in this (main) thread's context.
    with Downloader(task_id, save_dir, account, proxy) as downloader:
        downloader.pool(download_event, download_queue)

    search_thread.join()
    return


def handle_task(qhdm,period,account_index):
    """Intended daemonized pipeline entry point wiring loader -> search ->
    download -> send stages with queues and events.

    NOTE(review): this function is dead/broken code as written — it
    references names that are not defined anywhere in this file: `loader`,
    `tiles`, `periods`, `account`, `processes`, `download`, `access_thread`,
    and a module-level `logger`. It is only reachable from the commented-out
    daemonize block under __main__, and will raise NameError if called.
    """
    with open('./config.json',"r") as file:
        params = json.load(file)
        proxy = params["proxy"]
        accounts = params["accounts"]
        cargs = params["cargs"]
        save_dir = params["save_dir"]
        table_name = params["table_name"]
        interval = params['interval']
        ssh_args = params['ssh_args']
        remote_dir = params['remote_dir']

    proxies = {
        'http': f"{proxy['ip']}:{proxy['port']}",
        'https': f"{proxy['ip']}:{proxy['port']}"}
 

    # Queue of search parameters
    search_queue = queue.PriorityQueue(maxsize=1000)
    # Event controlling the search threads
    search_event = threading.Event()
    # Queue of download parameters
    download_queue = queue.PriorityQueue(maxsize=2000)
    # Event controlling the download threads
    download_event = threading.Event()
    # Queue of send parameters
    send_queue = queue.PriorityQueue(maxsize=1000)
    # Event controlling the send threads
    send_event = threading.Event()

    # NOTE(review): `loader`, `tiles` and `periods` are undefined here.
    loader_thread = threading.Thread(
        target=loader,
        args=(tiles,periods,interval,search_event,search_queue),
        name='装载数据线程')
    loader_thread.setDaemon(False)
    loader_thread.start()
    logger.debug("开始装载")

    # NOTE(review): this signature does not match the module-level search()
    # defined above (which takes qhdm/period/savedir/cargs/...).
    search_thread = threading.Thread(
        target=search,
        args=(
            proxy,
            search_event,
            search_queue,
            download_event,
            download_queue),
        name='查询数据线程')
    search_thread.setDaemon(False)
    search_thread.start()
    logger.debug("开始查询")

    # NOTE(review): `download`, `account` and `processes` are undefined here.
    download_thread = threading.Thread(
        target=download,
        args=(
            save_dir,
            account,
            proxy,
            processes,
            download_event,
            download_queue,
            send_event,
            send_queue),
        name='下载数据线程')
    download_thread.setDaemon(False)
    download_thread.start()
    logger.debug("开始下载")

    sender_thread = threading.Thread(
        target=sender,
        args=(
            ssh_args,
            remote_dir,
            send_event,
            send_queue),
        name='发送数据线程')
    sender_thread.setDaemon(False)
    sender_thread.start()
    logger.debug("开始发送")

    loader_thread.join()
    search_thread.join()
    access_thread.join()  # NOTE(review): `access_thread` is never created.
    download_thread.join()
    sender_thread.join()
    return


if __name__ == "__main__":
    # Manual (non-daemonized) invocation with hard-coded test parameters;
    # qhdm '41' is an administrative-division code — presumably a province,
    # verify against the dt_sy table.
    task_id = 'test123'
    qhdm = '41'
    period = ['2024-02-1', '2024-03-10']
    account_index = 0
    main(task_id=task_id, qhdm=qhdm,
         period=period, account_index=account_index)
    
    # handle_task(qhdm,period,account_index)
    # daemon = daemonize.Daemonize(
    #     app="CopernicusSentinel2", 
    #     chdir=r'/data/ygsfb/projects/sentinel-raster-vector',
    #     pid=r'/tmp/CopernicusSentinel2.pid',
    #     keep_fds=keep_fds, 
    #     action=lambda:handle_task(qhdm,period,account_index))
    # daemon.start()
    
    



    
  


