#!/data/ygsfb/virtualEnvironment/sentinel2/bin/python
from concurrent.futures import ThreadPoolExecutor
from datetime import datetime,timedelta
from multiprocessing.pool import ThreadPool
import atexit
import functools
import json
import logging
import os
import queue
import re
import sched
import socket
import sys
import threading
import time

from dateutil.relativedelta import relativedelta
from psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT
from psycopg2.extras import execute_values
from requests.adapters import HTTPAdapter
from requests.packages.urllib3.util.retry import Retry
from tqdm import tqdm
import daemonize
import paramiko
import psycopg2
import requests

    
# Daemon log file; opened with "w" so every restart begins with a fresh log.
output_file = r"/tmp/CopernicusSentinel2.log"
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)  # lower to INFO once the pipeline is stable
logger.propagate = False  # keep records out of the root logger's handlers
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
file_handler = logging.FileHandler(output_file, "w")
file_handler.setFormatter(formatter)
file_handler.setLevel(logging.DEBUG)
logger.addHandler(file_handler)
# daemonize must keep this descriptor open across the double fork
keep_fds = [file_handler.stream.fileno()]


class Searcher:
    """Queries the Copernicus Data Space OData catalogue for Sentinel-2 products.

    All HTTP traffic is routed through the configured proxy. Results are
    split into DB-update rows and download work items.
    """

    url = "https://catalogue.dataspace.copernicus.eu/odata/v1/Products"

    def __init__(self, proxy, cloud=100.0):
        """proxy: dict with 'ip' and 'port'; cloud: max cloudCover filter (%)."""
        self.cloud = cloud
        # Single proxy mapping (the original assigned this dict twice).
        self.proxies = {
            'http': f"{proxy['ip']}:{proxy['port']}",
            'https': f"{proxy['ip']}:{proxy['port']}"}
        self.session = requests.Session()
        # Route every catalogue request through the proxy.
        self.session.proxies.update(self.proxies)

    def searcher_data(self, tile, products):
        """Convert raw OData product dicts into DB rows and download items.

        Returns (data_update, data_download):
          data_update  - tuples matching the sentinel2_l1c upsert column order
          data_download - [name, product_date, product_id] for Online products
        """
        data_update = []
        data_download = []
        for product in products:
            product_date = datetime.strptime(
                product['ContentDate']['Start'], "%Y-%m-%dT%H:%M:%S.%fZ")
            # Footprint looks like "geography'SRID=...;POLYGON...'";
            # keep the quoted WKT part only.
            wkt = re.search(r"'(.*?)'", product['Footprint']).group(1)
            # First attribute named cloudCover carries the cloud percentage.
            cloud = next(
                att["Value"] for att in product['Attributes']
                if att['Name'] == "cloudCover")
            data_update.append((
                product['Name'].replace(".SAFE", ""),
                product['Id'],
                tile,
                cloud,
                product['Online'],
                product_date,
                wkt,
                datetime.now()))
            if product['Online']:
                data_download.append([
                    product['Name'].replace(".SAFE", ""),
                    product_date,
                    product['Id']])
        return data_update, data_download

    def searcher_link(self, url, params=None):
        """Fetch one OData page (recursing into @odata.nextLink) and return all product dicts."""
        products = []
        try:
            with self.session.get(url, params=params) as response:
                if response.status_code == 200:
                    # Parse only after the status check: error bodies may not be JSON.
                    result = response.json()
                    if "@odata.nextLink" in result:
                        products.extend(self.searcher_link(result["@odata.nextLink"]))
                    products.extend(result["value"])
                else:
                    # Lazy %-formatting: the original passed two positional
                    # args to debug(), which raises at format time.
                    logger.debug("catalogue error %s: %s",
                                 response.status_code, response.text)
        except Exception as exc:
            # network / proxy failure: log and return what we have
            logger.debug("网络或VPN问题！ %s", exc)
        return products

    def searcher_tile(self, tile, period):
        """Search one tile over (start, end) 'YYYY-MM-DD' dates; return (db_rows, download_items)."""
        start, end = period
        attributes = [
            f"Attributes/OData.CSC.StringAttribute/any(att:att/Name eq 'tileId' and att/OData.CSC.StringAttribute/Value eq '{tile}')",
            f"Attributes/OData.CSC.StringAttribute/any(att:att/Name eq 'processingLevel' and att/OData.CSC.StringAttribute/Value eq 'S2MSI1C')",#S2MSI1C
            f"Attributes/OData.CSC.DoubleAttribute/any(att:att/Name eq 'cloudCover' and att/OData.CSC.DoubleAttribute/Value lt {self.cloud})",
            f"ContentDate/Start gt {start}T00:00:00.000Z",
            f"ContentDate/Start lt {end}T00:00:00.000Z"]
        payload = {
            '$filter': " and ".join(attributes),
            '$orderby': "ContentDate/Start desc",
            '$count': 'True',
            '$expand': 'Attributes'}  # Assets Attributes
        products = self.searcher_link(self.url, payload)
        data_update, data_download = self.searcher_data(tile, products)
        return data_update, data_download

    def close(self):
        """Release the underlying HTTP session."""
        self.session.close()
        return


def get_tiles(cargs, qhdm):
    """Return the distinct Sentinel-2 tile ids whose footprint intersects region `qhdm`.

    cargs: psycopg2.connect keyword arguments.
    qhdm: administrative region code (dt_sy.qhdm).
    Returns [] when the query fails.
    """
    # Parameterized query: the original interpolated qhdm into the SQL
    # string, which is injectable and breaks on quotes.
    query = """
            SELECT distinct china_tile.tile
            FROM china_tile
            JOIN dt_sy 
            on st_intersects(china_tile.geom,dt_sy.shape)
            WHERE dt_sy.qhdm = %s 
            """
    connection = psycopg2.connect(**cargs)
    connection.autocommit = True
    cursor = connection.cursor()
    data = []  # stay defined even if the query fails (was a NameError)
    try:
        cursor.execute(query, (qhdm,))
        data = cursor.fetchall()
    except Exception as e:
        connection.rollback()
        logger.debug("get_tiles failed: %s", e)
    finally:
        cursor.close()
        connection.close()
    return [row[0] for row in data]


def get_periods(period):
    """Split a ('YYYY-MM-DD', 'YYYY-MM-DD') period at the recent-data boundary.

    Products older than ~5 days are stable ("history"); newer ones must be
    re-queried periodically ("current"). Returns (history_period,
    current_period); either element is None when the requested period lies
    entirely on one side of the boundary.

    Raises ValueError if the period cannot be classified.
    """
    start_date, end_date = period
    start_date = datetime.strptime(start_date, "%Y-%m-%d")
    end_date = datetime.strptime(end_date, "%Y-%m-%d")
    # Products become "history" roughly 5 days after acquisition.
    split_date = datetime.now() - timedelta(days=5)
    if start_date <= split_date <= end_date:
        history_period = (
            start_date.strftime("%Y-%m-%d"), split_date.strftime("%Y-%m-%d"))
        current_period = (
            split_date.strftime("%Y-%m-%d"), end_date.strftime("%Y-%m-%d"))
    elif end_date < split_date:
        history_period = (
            start_date.strftime("%Y-%m-%d"), split_date.strftime("%Y-%m-%d"))
        current_period = None
    elif split_date < start_date:
        history_period = None
        current_period = (
            split_date.strftime("%Y-%m-%d"), end_date.strftime("%Y-%m-%d"))
    else:
        # Unreachable for comparable dates, but the original fell through
        # here with both names unbound (UnboundLocalError); fail loudly.
        logger.debug("输入日期有误！")
        raise ValueError(f"invalid period: {period}")
    return history_period, current_period


def create_folder(save_dir, period):
    """Ensure save_dir/<YYYYMM> exists for every month the period touches.

    period: ('YYYY-MM-DD', 'YYYY-MM-DD') inclusive date range.

    The original stepped in whole months from the start date, so with e.g.
    (2024-01-15, 2024-03-02) the March directory was never created and later
    downloads into it would fail; stepping by month boundaries covers every
    overlapped month. Also drops the dateutil dependency from this function.
    """
    start_date, end_date = period
    current = datetime.strptime(start_date, '%Y-%m-%d').replace(day=1)
    end = datetime.strptime(end_date, '%Y-%m-%d')
    while current <= end:
        os.makedirs(os.path.join(save_dir, current.strftime('%Y%m')), exist_ok=True)
        # Advance to the first day of the next month (32 days always
        # overshoots into it; replace(day=1) snaps back).
        current = (current + timedelta(days=32)).replace(day=1)
    return


def loader(tiles,periods,interval,out_event,out_queue):
    """Feed (priority, (tile, period)) work items into out_queue.

    History-period items are enqueued once at priority 1. For the current
    period, a sched-based timer re-enqueues the last ~5-day window of every
    tile at priority 0 every `interval` seconds, until 5 days past the
    period's end date. Sets out_event when finished so consumers can drain
    the queue and exit.
    """

    def assistant(tiles,current_period,out_queue):
        # Clamp the re-query window to roughly [today-5d, today+1d]
        # intersected with current_period; bail out when the period lies
        # entirely outside that window.
        today = datetime.now()
        start_date, end_date = current_period
        start_date = datetime.strptime(start_date,"%Y-%m-%d")
        end_date = datetime.strptime(end_date,"%Y-%m-%d")
        if today-timedelta(days=5) > start_date:
            start_date = today-timedelta(days=5) 
        elif start_date > today:
            return
        if end_date > today:
            end_date = today+timedelta(days=1)
        elif today-timedelta(days=5) > end_date:
            return
        start_date = start_date.strftime("%Y-%m-%d")
        end_date = end_date.strftime("%Y-%m-%d")
        period = (start_date,end_date)
        # Priority 0 sorts ahead of the history back-fill below.
        for tile in tiles:out_queue.put((0,(tile,period)))
        return
    
    scheduler = sched.scheduler(timefunc=time.time,delayfunc=time.sleep)
    history_period,current_period = periods
    if history_period:
        # One-shot back-fill; priority 1 sorts after current-period items.
        for tile in tiles:out_queue.put((1,(tile,history_period))) 
            
    if current_period: 
        end_date = datetime.strptime(current_period[-1],"%Y-%m-%d")
        # Keep polling for 5 days past the nominal end to catch products
        # published late.
        finish_date = end_date + timedelta(days=5)
        # finish_date = datetime.now() + timedelta(seconds=10)  # debug shortcut
        argument = (tiles,current_period,out_queue)
        scheduler.enterabs(time.time(),1,assistant,argument)
        while datetime.now() < finish_date:
            scheduler.run()  # blocks until the queued event has fired
            delay = time.time()+interval
            scheduler.enterabs(delay,1,assistant,argument)  
            time.sleep(1)  # sleep 1 second
    out_event.set()
    logger.debug("装载进程结束！")
    return


def search(proxy,in_event,in_queue,out_event_1,out_queue_1,out_event_2,out_queue_2):
    """Consume (priority, (tile, period)) items, query the catalogue, and fan
    results out to the DB-update queue (out_queue_1) and the download queue
    (out_queue_2). Runs until the loader sets in_event AND in_queue drains,
    then signals both downstream events.
    """
    # seen_set guarantees an already-enqueued download item is never enqueued
    # again. Lists are unhashable in Python, so each record is converted to a
    # tuple before being added to the set.
    seen_set = set()
    searcher = Searcher(proxy,cloud=100.0)
    while not (in_event.is_set() and in_queue.empty()):
        # NOTE(review): get() can block forever if the producer finishes
        # between the empty() check and this call — confirm intended.
        priority,(tile,period) = in_queue.get()
        if not period: continue
        records, zipdata = searcher.searcher_tile(tile,period)
        out_queue_1.put(records)
        for i in zipdata:
            tuple_i = tuple(i)
            if tuple_i not in seen_set:
                seen_set.add(tuple_i)
                out_queue_2.put((priority,i)) 
    if not out_event_1.is_set():out_event_1.set()
    if not out_event_2.is_set():out_event_2.set()
    searcher.close()
    logger.debug("查询进程结束！")
    return


def access(cargs, in_event, in_queue):
    """Upsert batches of searcher rows into sentinel2_l1c.

    Consumes lists of row tuples from in_queue until the search stage sets
    in_event and the queue drains. Rows conflicting on `name` are updated
    in place.
    """
    # Constant statement: hoisted out of the loop (the original rebuilt the
    # same string on every batch).
    query = """
            INSERT INTO sentinel2_l1c
            (name,id,tile,cloud,online,product_date,geom,create_date) 
            VALUES %s
            ON CONFLICT (name)
            DO UPDATE SET 
            online=excluded.online,
            id=excluded.id,
            tile=excluded.tile,
            cloud=excluded.cloud,
            product_date=excluded.product_date,
            geom=excluded.geom,
            create_date=excluded.create_date;
            """
    connection = psycopg2.connect(**cargs)
    # autocommit = True is equivalent to setting ISOLATION_LEVEL_AUTOCOMMIT;
    # the original did both.
    connection.autocommit = True
    cursor = connection.cursor()
    while not (in_event.is_set() and in_queue.empty()):
        # NOTE(review): get() can block forever if the producer finishes
        # between the empty() check and this call — confirm intended.
        args_list = in_queue.get()
        execute_values(cursor, query, args_list)
    cursor.close()
    connection.close()
    logger.debug("入库进程结束！")
    return


class Downloader:
    """Authenticated, multi-threaded downloader for Copernicus product archives.

    Keeps an OAuth access token fresh in a background daemon thread, streams
    product zips into save_dir/<yearmonth>/<name>.zip, and forwards finished
    paths to the sender queue. Use as a context manager so the token thread
    is started before downloads begin and the HTTP session is released.
    """

    # Retry transient connection-level failures.
    retries = Retry(
        total=5,
        connect=5,
        read=5,
        backoff_factor=0,  # 0.5 would give exponential back-off
        raise_on_status=True,
        status_forcelist=[104, 500, 502, 503, 504])
    adapter = HTTPAdapter(
        max_retries=retries,
        pool_connections=1,
        pool_maxsize=1)
    tokenurl = "https://identity.dataspace.copernicus.eu/auth/realms/CDSE/protocol/openid-connect/token"
    tokenheaders = {'Content-Type': 'application/x-www-form-urlencoded'}
    interval = 600   # kept for backward compatibility; not read in this class
    timeout = 60     # per-request timeout, seconds
    processes = 4    # number of concurrent download threads

    def __init__(self, save_dir, account, proxy):
        """save_dir: local archive root; account: {'username','password'}; proxy: {'ip','port'}."""
        self.save_dir = save_dir
        self.account = account
        self.proxies = {
            'http': f"{proxy['ip']}:{proxy['port']}",
            'https': f"{proxy['ip']}:{proxy['port']}"}
        self.session = requests.Session()
        self.session.proxies.update(self.proxies)
        self.session.headers.update(self.tokenheaders)
        self.session.mount('http://', self.adapter)
        self.session.mount('https://', self.adapter)
        self.session.keep_alive = True

    def __enter__(self):
        # Start the token-refresh daemon and block until the first access
        # token exists, so downloads never run unauthenticated.
        threading.Thread(target=self.__refresh_token, daemon=True).start()
        private_attribute = f'_{self.__class__.__name__}__access_token'
        while not hasattr(self, private_attribute):
            time.sleep(1)
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        if exc_type is not None:
            logger.debug(f"Exception occurred: {exc_type}, {exc_value}")
        # Always release the session (the original leaked it on exceptions).
        self.close()

    def __refresh_token(self):
        """Daemon loop: obtain a token with the password grant, then keep it fresh via the refresh grant."""
        password_grant = {
            'grant_type': 'password',
            'username': self.account['username'],
            'password': self.account['password'],
            'client_id': 'cdse-public'}
        data = password_grant
        while True:
            with self.session.post(
                    url=self.tokenurl, data=data, timeout=60) as response:
                if response.status_code == 200:
                    payload = response.json()
                    self.__access_token = payload['access_token']
                    data = {
                        'grant_type': 'refresh_token',
                        'refresh_token': payload['refresh_token'],
                        'client_id': 'cdse-public'}
                else:
                    # Lazy %-formatting (the original passed the JSON as a
                    # positional arg, which raises at format time), and fall
                    # back to the password grant so a failed first request
                    # cannot reference an unbound refresh token.
                    logger.debug("token报错 %s", response.text)
                    data = password_grant
            time.sleep(2)
        return

    def __download_file(self, in_event, in_queue, out_queue):
        """Worker loop: pull (priority, (name, date, product_id)) items and download each archive."""
        while not (in_event.is_set() and in_queue.empty()):
            # NOTE(review): get() can block forever if the producer finishes
            # between the empty() check and this call — confirm intended.
            priority, (name, date, productid) = in_queue.get()
            zip_path = os.path.join(
                self.save_dir, date.strftime("%Y%m"), f"{name}.zip")
            if os.path.exists(zip_path):
                # Already downloaded earlier: forward straight to the sender.
                out_queue.put((priority, zip_path))
                continue
            # Stream to a temporary name and rename on success so a crashed
            # transfer never leaves a truncated .zip behind.
            incomplete_path = zip_path.replace('.zip', '.incomplete')
            url = f"https://zipper.dataspace.copernicus.eu/odata/v1/Products({productid})/$value"
            with self.session.get(
                    url,
                    headers={"Authorization": f"Bearer {self.__access_token}"},
                    timeout=self.timeout,
                    stream=True) as response:
                if response.status_code == 200:
                    with open(incomplete_path, "wb") as zipfile:
                        for chunk in response.iter_content(chunk_size=1024):
                            if chunk:
                                zipfile.write(chunk)
                                zipfile.flush()  # push data to disk as it arrives
                    os.rename(incomplete_path, zip_path)
                    out_queue.put((priority, zip_path))
                else:
                    # Lazy %-formatting fix; use .text since error bodies are
                    # not guaranteed to be JSON.
                    logger.debug("%s download failed: %s %s",
                                 name, response.status_code, response.text)
        return

    def execute(self, in_event, in_queue, out_queue):
        """Run `processes` download worker threads and block until all finish."""
        threads = []
        for i in range(self.processes):
            t = threading.Thread(
                target=self.__download_file,
                args=(in_event, in_queue, out_queue),
                daemon=False,
                name=f"下载线程:{i}")
            threads.append(t)
        for t in threads:
            t.start()
        for t in threads:
            t.join()
        return

    def close(self):
        """Release the HTTP session."""
        logger.debug("Closing the custom class")
        self.session.close()
        return


def download(save_dir, account, proxy, processes, in_event, in_queue, out_event, out_queue):
    """Drain the download queue through a Downloader, then signal the send stage."""
    with Downloader(save_dir, account, proxy) as worker:
        worker.execute(in_event, in_queue, out_queue)
    if not out_event.is_set():
        out_event.set()
    logger.debug("下载进程结束！")
    return


def sender(ssh_args, remote_dir, in_event, in_queue):
    """Ship downloaded zip archives to the remote host over SFTP.

    Consumes (priority, local_path) items until the download stage sets
    in_event and the queue drains. Files already present remotely are
    skipped; uploads land under a temporary name and are renamed on success.
    """
    client = paramiko.SSHClient()
    client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    client.connect(*ssh_args)
    sftp = client.open_sftp()
    while not (in_event.is_set() and in_queue.empty()):
        priority, local_path = in_queue.get()
        if not os.path.exists(local_path):
            logger.debug(f"{local_path}下载失败")
            continue
        # Mirror the <yearmonth>/<name>.zip tail of the local path remotely.
        file_name = "/".join(local_path.split('/')[-2:])
        remote_path = os.path.join(remote_dir, file_name)
        try:
            sftp.stat(remote_path)  # exists remotely -> nothing to do
        except FileNotFoundError:
            temp_path = remote_path.replace('.zip', '.incomplete')
            sftp.put(local_path, temp_path, confirm=True)
            sftp.rename(temp_path, remote_path)
    sftp.close()
    client.close()
    logger.debug("发送进程结束！")
    return
 

def checker(cargs, table_name):
    """Return True if `table_name` exists in the database, else False.

    cargs: psycopg2.connect keyword arguments.
    Returns False when the existence query itself fails (the original raised
    NameError in that case because the result variable stayed unbound).
    """
    # Parameterized: the original interpolated table_name into the SQL string.
    query = """
        SELECT EXISTS (
            SELECT 
            FROM information_schema.tables 
            WHERE table_name=%s);"""
    connection = psycopg2.connect(**cargs)
    connection.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)
    cursor = connection.cursor()
    exists = False
    try:
        cursor.execute(query, (table_name,))
        exists = cursor.fetchone()[0]
    except Exception as e:
        connection.rollback()
        logger.debug("checker failed: %s", e)
    finally:
        cursor.close()
        connection.close()
    return exists


def create_table(cargs, table_name):
    """Create the Sentinel-2 product table (with its column comments) in the database.

    cargs: psycopg2.connect keyword arguments; table_name: table to create.
    A failed CREATE is rolled back and otherwise ignored, matching the
    best-effort style of the other DB helpers in this file.
    """
    logger.debug(f"Table '{table_name}' does not exist in the database.")
    query = f'''
        CREATE TABLE {table_name} (
            name VARCHAR(255) PRIMARY KEY,
            id VARCHAR(255),
            tile VARCHAR(255),
            cloud NUMERIC(10, 2),
            online bool,
            product_date timestamp,
            geom public.geometry(polygon, 4326),
            zip_path text,
            tif_path text,
            create_date timestamp);
            COMMENT ON COLUMN {table_name}.name IS '产品名称';
            COMMENT ON COLUMN {table_name}.id IS '下载ID';
            COMMENT ON COLUMN {table_name}.tile IS '图幅号';
            COMMENT ON COLUMN {table_name}.cloud IS '含云量';
            COMMENT ON COLUMN {table_name}.online IS '产品是否在线';
            COMMENT ON COLUMN {table_name}.geom IS '有效边界范围';
            COMMENT ON COLUMN {table_name}.product_date IS '产品日期';
            COMMENT ON COLUMN {table_name}.zip_path IS '原始zip存储路径';
            COMMENT ON COLUMN {table_name}.tif_path IS '合成10波段栅格存储路径';
            COMMENT ON COLUMN {table_name}.create_date IS '该条记录更新时间';'''
    conn = psycopg2.connect(**cargs)
    conn.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)
    cur = conn.cursor()
    try:
        cur.execute(query)
        conn.commit()
    except Exception:
        conn.rollback()
    cur.close()
    conn.close()
    logger.debug(f"Table '{table_name}' exists in the database.")
    return


def handle_task(qhdm, period, account_index):
    """Run the whole pipeline for one region and date range.

    qhdm: administrative region code used to select tiles.
    period: ('YYYY-MM-DD', 'YYYY-MM-DD') date range.
    account_index: index into the config's `accounts` list.

    Wires up loader -> search -> (access, download -> sender) via priority
    queues and events, starts one thread per stage, and blocks until all
    stages finish.
    """
    with open('./config.json', "r") as file:
        params = json.load(file)
    proxy = params["proxy"]
    accounts = params["accounts"]
    cargs = params["cargs"]
    save_dir = params["save_dir"]
    table_name = params["table_name"]
    interval = params['interval']
    ssh_args = params['ssh_args']
    remote_dir = params['remote_dir']

    processes = 4
    # Honour the caller-selected account (the original ignored account_index
    # and hard-coded accounts[2]).
    account = accounts[account_index]
    tiles = get_tiles(cargs, qhdm)
    create_folder(save_dir, period)
    periods = get_periods(period)

    if not checker(cargs, table_name):
        create_table(cargs, table_name)

    # Stage plumbing: each queue carries work between adjacent stages; each
    # event is set by the producer when it finishes so the consumer can exit.
    search_queue = queue.PriorityQueue(maxsize=1000)    # search parameters
    search_event = threading.Event()
    update_queue = queue.Queue(maxsize=500)             # DB upsert batches
    update_event = threading.Event()
    download_queue = queue.PriorityQueue(maxsize=2000)  # download parameters
    download_event = threading.Event()
    send_queue = queue.PriorityQueue(maxsize=1000)      # finished zip paths
    send_event = threading.Event()

    stage_specs = [
        (loader, (tiles, periods, interval, search_event, search_queue),
         '装载数据线程', "开始装载"),
        (search, (proxy, search_event, search_queue, update_event,
                  update_queue, download_event, download_queue),
         '查询数据线程', "开始查询"),
        (access, (cargs, update_event, update_queue),
         '更新数据线程', "开始入库"),
        (download, (save_dir, account, proxy, processes, download_event,
                    download_queue, send_event, send_queue),
         '下载数据线程', "开始下载"),
        (sender, (ssh_args, remote_dir, send_event, send_queue),
         '发送数据线程', "开始发送"),
    ]
    threads = []
    for target, args, name, message in stage_specs:
        # daemon=False via the constructor: Thread.setDaemon() is deprecated.
        t = threading.Thread(target=target, args=args, name=name, daemon=False)
        t.start()
        logger.debug(message)
        threads.append(t)
    for t in threads:
        t.join()
    return


def loop_decorator(interval=1):
    """Decorator factory: run the wrapped function forever, sleeping `interval`
    seconds between calls. Used to keep worker threads alive across tasks.
    """
    def decorator(func):
        @functools.wraps(func)  # preserve the wrapped function's identity
        def wrapper(*args, **kwargs):
            while True:
                func(*args, **kwargs)
                print(f"{args},{kwargs}:任务结束！")
                time.sleep(interval)
        return wrapper
    return decorator


@loop_decorator(interval=2)
def handle_client(queue_client):
    """Worker loop body: block for the next task payload and run the pipeline on it."""
    task = queue_client.get()
    handle_task(**task)
    return


def listening(port, maxsize=3):
    """Accept task requests on `port` and dispatch them to worker threads.

    Each request is a single JSON payload of handle_task keyword arguments.
    maxsize bounds both the task queue and the number of worker threads.
    Runs forever; bind retries every 5 seconds until the port is free.
    """
    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as server_socket:
        while True:
            try:
                server_socket.bind(('0.0.0.0', port))
                break
            except OSError as e:
                # The original logged the literal string "e", hiding the error.
                logger.debug("bind failed on port %s: %s", port, e)
                time.sleep(5)
        logger.debug(f"Listening on port {port}...")
        server_socket.listen(3)
        queue_client = queue.Queue(maxsize=maxsize)
        for index in range(maxsize):
            threading.Thread(
                target=handle_client,
                args=(queue_client,),
                name=f"任务线程{index}",
                daemon=True).start()
        while True:
            client_socket, addr = server_socket.accept()
            try:
                received_data = client_socket.recv(1024)
                data = json.loads(received_data.decode())
                logger.debug(f"Accepted connection from: {addr}")
                logger.debug(f"Received message: {data}")
                queue_client.put(data)
                response = "Processing has started"
                client_socket.send(response.encode())
            except (json.JSONDecodeError, UnicodeDecodeError, OSError) as e:
                # A malformed payload previously crashed the accept loop.
                logger.debug("bad request from %s: %s", addr, e)
            finally:
                client_socket.close()
    return


if __name__ == "__main__":
    port = 8899
    logger.debug("CopernicusSentinel2 Daemon Processing Start!")
    # Detach into a daemon process; keep_fds keeps the log file descriptor
    # open across the fork so logging keeps working after daemonization.
    daemon = daemonize.Daemonize(
        app="CopernicusSentinel2", 
        chdir=r'/data/ygsfb/projects/sentinel-raster-vector',
        pid=r'/tmp/CopernicusSentinel2.pid',
        keep_fds=keep_fds, 
        action=lambda:listening(port))
    daemon.start()



    
  


