import daemon
import redis
import json
from psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT
from datetime import datetime,timedelta
import psycopg2
import time
import threading
from threading import Thread
import os
import flask
from flask import request
from waitress import serve
import inspect
import ctypes
import signal


app = flask.Flask(__name__)
# Bug fix: this was `app.config['JSON_AS_ASCII'] = flask` — assigning the
# flask *module object*, which is always truthy. The intent is False so that
# jsonify emits raw UTF-8 (Chinese messages) instead of \uXXXX escapes.
app.config['JSON_AS_ASCII'] = False
# task_id (worker thread ident) -> threading.Event used by /stop_task to
# signal that worker to exit.
event_dict = {}


def qhdm_2_tiles(cargs, qhdm):
    """Return the distinct Sentinel-2 tile ids mapped to division code *qhdm*.

    Args:
        cargs: dict of psycopg2 connection keyword arguments.
        qhdm: administrative-division code used to filter the ``qh_tile`` table.

    Returns:
        list: distinct tile identifiers; empty list when the query fails.
    """
    connection = psycopg2.connect(**cargs)
    connection.autocommit = True
    # Default so a failed query returns [] instead of raising NameError on the
    # final return (the original left `data` unbound on the except path).
    data = []
    try:
        with connection.cursor() as cursor:
            # Parameterized query: the original interpolated qhdm with an
            # f-string, which is an SQL-injection vector. No explicit commit
            # needed — autocommit is enabled above.
            cursor.execute(
                "SELECT DISTINCT tile FROM qh_tile WHERE qhdm = %s", (qhdm,))
            data = cursor.fetchall()
    except Exception:
        connection.rollback()
    finally:
        # Always release the connection, even on unexpected errors.
        connection.close()
    return [row[0] for row in data]


def split_periods(period):
    """Split a (start, end) date range at the cut point "now - 5 days".

    The range is divided into a *history* part (before the cut point, fetched
    once) and a *current* part (after the cut point, polled periodically).

    Args:
        period: (start, end) pair of "%Y-%m-%d" date strings.

    Returns:
        tuple: (history_period, current_period); each element is a
        (start, end) pair of "%Y-%m-%d" strings, or None when that part of
        the range is empty.

    Raises:
        ValueError: defensively, if the dates fall in no recognized case.
    """
    start_str, end_str = period
    start_date = datetime.strptime(start_str, "%Y-%m-%d")
    end_date = datetime.strptime(end_str, "%Y-%m-%d")
    split_date = datetime.now() - timedelta(days=5)
    fmt = "%Y-%m-%d"
    if start_date <= split_date <= end_date:
        # Cut point inside the range: both parts exist and share the cut day.
        history_period = (start_date.strftime(fmt), split_date.strftime(fmt))
        current_period = (split_date.strftime(fmt), end_date.strftime(fmt))
    elif end_date < split_date:
        # Entire range is in the past; history end is still the cut day,
        # matching the original behavior.
        history_period = (start_date.strftime(fmt), split_date.strftime(fmt))
        current_period = None
    elif split_date < start_date:
        # Entire range is recent/future: current only.
        history_period = None
        current_period = (split_date.strftime(fmt), end_date.strftime(fmt))
    else:
        # Unreachable for valid datetimes, but kept as a guard. Bug fix: the
        # original `raise("输入日期有误！")` raised TypeError (a str is not an
        # exception); raise a proper ValueError instead.
        raise ValueError("输入日期有误！")
    return history_period, current_period


def get_request_args(tile, period, cloud):
    """Build the (tile, url, params) triple for a Copernicus OData search.

    Args:
        tile: Sentinel-2 tile id (e.g. "51RTQ").
        period: (start, end) pair of "%Y-%m-%d" date strings.
        cloud: cloud-cover upper bound (exclusive) in percent.

    Returns:
        tuple: (tile, endpoint URL, query-parameter dict) ready to be
        serialized for the downstream searcher.
    """
    start_day, end_day = period
    endpoint = "https://catalogue.dataspace.copernicus.eu/odata/v1/Products"
    # OData filter clauses, AND-ed together: tile id, processing level,
    # cloud cover, and the acquisition-start window.
    filter_clauses = (
        f"Attributes/OData.CSC.StringAttribute/any(att:att/Name eq 'tileId' and att/OData.CSC.StringAttribute/Value eq '{tile}')",
        f"Attributes/OData.CSC.StringAttribute/any(att:att/Name eq 'processingLevel' and att/OData.CSC.StringAttribute/Value eq 'S2MSI1C')",
        f"Attributes/OData.CSC.DoubleAttribute/any(att:att/Name eq 'cloudCover' and att/OData.CSC.DoubleAttribute/Value lt {cloud})",
        f"ContentDate/Start gt {start_day}T00:00:00.000Z",
        f"ContentDate/Start lt {end_day}T00:00:00.000Z",
    )
    query = {
        '$filter': " and ".join(filter_clauses),
        '$orderby': "ContentDate/Start desc",
        '$count': 'True',
        '$expand': 'Attributes',
    }
    return (tile, endpoint, query)


def execute_histroy(tiles, historyp, cloud, loading_queue, logger):
    """Queue one search request per tile for the historical period.

    No-op when *historyp* is falsy (the range has no historical part).
    Note: the name's spelling is kept as-is since callers use it.
    """
    if not historyp:
        return
    for tile in tiles:
        logger.info(f"loading:{tile} {historyp} {cloud}")
        loading_queue.put(get_request_args(tile, historyp, cloud))


def execute_current(tiles,currentp,cloud,interval,loading_queue,logger):
    """Periodically queue search requests for the rolling "current" period.

    Args:
        tiles: iterable of Sentinel-2 tile ids.
        currentp: (start, end) "%Y-%m-%d" pair, or None to do nothing.
        cloud: cloud-cover upper bound passed through to get_request_args.
        interval: seconds to sleep between rounds (comments suggest 8 hours).
        loading_queue: queue-like object receiving request tuples via .put().
        logger: logger used for progress messages.

    NOTE(review): unlike main(), this loop has no stop-event check, so it
    cannot be interrupted mid-sleep — confirm whether this function is still
    in use or superseded by main().
    """
    if currentp: # if a current period exists, run the scheduled query/load loop
        start_date, end_date = currentp
        start_date = datetime.strptime(start_date,"%Y-%m-%d")
        end_date = datetime.strptime(end_date,"%Y-%m-%d")
        # If the start date is still in the future, poll hourly until reached.
        while start_date > datetime.now():
            logger.info('开始日期在今天之后，等待1小时为周期')
            time.sleep(60*60)
        while datetime.now() <= end_date + timedelta(days=5): # keep running until 5 days past the end date
            if datetime.now()-timedelta(days=5) > start_date: # clamp the window start to "now - 5 days"
                sdate = datetime.now()-timedelta(days=5) 
            else:
                sdate = start_date
            if end_date > datetime.now(): # clamp the window end to tomorrow when the end date is in the future
                edate = datetime.now()+timedelta(days=1)
            else:
                edate = end_date
            cperiod = (sdate.strftime("%Y-%m-%d"),edate.strftime("%Y-%m-%d"))
            for tile in tiles:
                logger.info(f"loading:{tile} {cperiod} {cloud}")
                item = get_request_args(tile,cperiod,cloud)
                loading_queue.put(item)
            logger.info("开始以8小时为周期的休眠！")
            time.sleep(interval)  # sleep one round; interval comes from config (nominally 8 hours)
    return


def main(qhdm,period:list,cloud:int,priority,event):
    """Worker-thread entry point: search and enqueue Sentinel-2 download jobs.

    Loads config, resolves the tiles for division code *qhdm*, splits *period*
    into history/current parts, and pushes per-tile request tuples onto the
    Redis list ``loader_searcher``. The history part is pushed once; the
    current part is re-pushed every ``interval`` seconds until 5 days past the
    end date, checking *event* each second so /stop_task can interrupt it.

    NOTE(review): the config path is hardcoded to a user directory — consider
    an env var. History uses lpush while current uses rpush; presumably this
    prioritizes history items — confirm with the queue consumer.
    """
    with open('/data/jiabing/RS_CODE/Sentinel2download/config.json',"r") as file:
        params = json.load(file)
    cargs = params["cargs"]
    interval = params['interval']
    rediscon = params["rediscon"]
    pool = redis.ConnectionPool(**rediscon)
    redisr = redis.Redis(connection_pool=pool)
    tiles = qhdm_2_tiles(cargs,qhdm)
    historyp, currentp = split_periods(period)
    if historyp: # history part exists: query once and enqueue immediately
        for tile in tiles:
            print(f"loading:{tile} {historyp} {cloud}")
            item = get_request_args(tile,historyp,cloud)
            # lpush: history items go to the head of the Redis list
            redisr.lpush('loader_searcher', json.dumps(list(item)+[priority]))    
    if currentp: # current part exists: run the periodic query/enqueue loop
        start_date, end_date = currentp
        start_date = datetime.strptime(start_date,"%Y-%m-%d")
        end_date = datetime.strptime(end_date,"%Y-%m-%d")
        # If the start date is still in the future, poll hourly until reached.
        while start_date > datetime.now():
            print('开始日期在今天之后，等待1小时为周期')
            time.sleep(60*60)
        while datetime.now() <= end_date + timedelta(days=5): # keep running until 5 days past the end date
            if datetime.now()-timedelta(days=5) > start_date: # clamp the window start to "now - 5 days"
                sdate = datetime.now()-timedelta(days=5) 
            else:
                sdate = start_date
            if end_date > datetime.now(): # clamp the window end to tomorrow when the end date is in the future
                edate = datetime.now()+timedelta(days=1)
            else:
                edate = end_date
            cperiod = (sdate.strftime("%Y-%m-%d"),edate.strftime("%Y-%m-%d"))
            for tile in tiles:
                print(f"loading:{tile} {cperiod} {cloud}")
                item = get_request_args(tile,cperiod,cloud)
                # rpush: current items go to the tail of the Redis list
                redisr.rpush('loader_searcher', json.dumps(list(item)+[priority]))
            print("开始以8小时为周期的休眠！")
            # Sleep in 1-second slices so the stop event is noticed promptly.
            eight_hours_later = datetime.now() + timedelta(hours=interval)
            while datetime.now() <= eight_hours_later:
                if event.is_set(): 
                    print("加载任务中断！")
                    return
                time.sleep(1)     
    print("加载任务结束！")
    # Set the event on natural completion so /stop_task's wait loop exits.
    event.set()
    return
 
 
def check_thread_exists(thread_id):
    """Return True if a live thread with ident *thread_id* currently exists."""
    return any(t.ident == thread_id for t in threading.enumerate())


@app.route('/start_task', methods=['post'])
def start_task():
    """Start a background download task from a JSON request body.

    Expects qhdm, start_date, end_date, cloud, product_type, level and
    priority fields; spawns a daemon thread running main() and registers a
    stop event under the thread's ident, which is returned as the task id.
    """
    if not request.is_json:
        return {'code': 401, 'msg': 'Request must be JSON'}
    data = request.json
    product_type = data.get('product_type')
    if product_type not in ("S2", "S210"):
        return {"code": 400, "msg": "产品类型错误"}
    qhdm = data.get('qhdm')
    cloud = data.get('cloud')
    level = data.get('level')
    priority = data.get('priority')  # high, middle, low
    period = [data.get('start_date'), data.get('end_date')]
    # Event lets /stop_task interrupt the worker loop.
    stop_event = threading.Event()
    worker = Thread(
        target=main,
        args=(qhdm, period, cloud, priority, stop_event),
        daemon=True)
    worker.start()
    task_id = worker.ident
    event_dict.update({task_id: stop_event})
    return {'code': 200, "task_id": task_id}


@app.route('/stop_task', methods=['post'])
def stop_task():
    """Stop a running download task identified by its task_id (thread ident).

    Sets the task's stop event, waits for the worker thread to exit, then
    removes the event from the registry.
    """
    if not request.is_json:
        return {'code': 401, 'msg': 'Request must be JSON'}
    data = request.json
    thread_id = int(data.get('task_id'))
    if thread_id not in event_dict:
        return {'code': 400, 'msg': "任务不存在！"}
    # Signal once; the worker polls this event every second.
    event_dict[thread_id].set()
    # Bug fix: the original loop re-read the dict and re-set the event in a
    # tight busy-wait, pinning a CPU core until the thread died. Sleep
    # between liveness checks instead.
    while check_thread_exists(thread_id):
        time.sleep(0.1)
    del event_dict[thread_id]
    return {'code': 200, 'msg': "任务已停止！"}


if __name__ == "__main__":
    # Detach as a Unix daemon; the daemon's stdout/stderr are redirected to
    # fixed log files (handles are owned by DaemonContext for its lifetime).
    with daemon.DaemonContext(
        stdout=open(r"/data/logfile/LoaderDaemon_out.log","w"),
        stderr=open(r"/data/logfile/LoaderDaemon_err.log","w")
        ):
        # Production WSGI server; waitress accepts the port as a string.
        serve(app,host='0.0.0.0',port='8002')
    #     app.run(debug=True,port='8888',host='0.0.0.0')

