import json
import logging
import os
import time
from datetime import datetime, timedelta

import psycopg2
from dateutil.relativedelta import relativedelta
from psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT


class Initialization:
    """One-shot setup: create per-month folders under ``save_dir`` and make
    sure the product table exists in the database.

    Args (to ``__init__``):
        period: ('YYYY-MM-DD', 'YYYY-MM-DD') start/end date strings.
        save_dir: root directory under which YYYYMM subfolders are created.
        cargs: keyword arguments for ``psycopg2.connect``.
        table_name: name of the product table to check and, if missing, create.
    """

    def __init__(self, period, save_dir, cargs, table_name):
        self.create_folder(save_dir, period)
        if not self.checker(cargs, table_name):
            self.create_table(cargs, table_name)

    @staticmethod
    def checker(cargs, table_name):
        """Return True if ``table_name`` exists in the database.

        The table name is bound as a query parameter (the original
        interpolated it into the SQL string, which was injectable), and
        database errors now propagate instead of silently rolling back and
        then hitting an UnboundLocalError on the unset result variable.
        """
        query = """
            SELECT EXISTS (
                SELECT
                FROM information_schema.tables
                WHERE table_name = %s);"""
        connection = psycopg2.connect(**cargs)
        connection.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)
        try:
            with connection.cursor() as cursor:
                cursor.execute(query, (table_name,))
                return cursor.fetchone()[0]
        finally:
            connection.close()

    @staticmethod
    def create_folder(save_dir, period):
        """Create one YYYYMM folder under ``save_dir`` for every month
        between the start and end dates of ``period`` (inclusive)."""
        start_date, end_date = period
        current_date = datetime.strptime(start_date, '%Y-%m-%d')
        end_date = datetime.strptime(end_date, '%Y-%m-%d')
        while current_date <= end_date:
            yearmonth_dir = os.path.join(save_dir, current_date.strftime('%Y%m'))
            # exist_ok avoids the check-then-create race of the original.
            os.makedirs(yearmonth_dir, exist_ok=True)
            current_date += relativedelta(months=1)

    @staticmethod
    def create_table(cargs, table_name):
        """Create ``table_name`` with the Sentinel-2 product schema and
        Chinese column comments.

        NOTE: DDL cannot take bound parameters, so ``table_name`` is
        interpolated directly — it must come from trusted configuration,
        never from user input.
        """
        # The original referenced an undefined global `logger` (NameError).
        logger = logging.getLogger(__name__)
        logger.debug(f"Table '{table_name}' does not exist in the database.")
        query = f'''
            CREATE TABLE {table_name} (
                name VARCHAR(255) PRIMARY KEY,
                id VARCHAR(255),
                tile VARCHAR(255),
                cloud NUMERIC(10, 2),
                online bool,
                product_date timestamp,
                geom public.geometry(polygon, 4326),
                zip_path text,
                tif_path text,
                create_date timestamp);
                COMMENT ON COLUMN {table_name}.name IS '产品名称';
                COMMENT ON COLUMN {table_name}.id IS '下载ID';
                COMMENT ON COLUMN {table_name}.tile IS '图幅号';
                COMMENT ON COLUMN {table_name}.cloud IS '含云量';
                COMMENT ON COLUMN {table_name}.online IS '产品是否在线';
                COMMENT ON COLUMN {table_name}.geom IS '有效边界范围';
                COMMENT ON COLUMN {table_name}.product_date IS '产品日期';
                COMMENT ON COLUMN {table_name}.zip_path IS '原始zip存储路径';
                COMMENT ON COLUMN {table_name}.tif_path IS '合成10波段栅格存储路径';
                COMMENT ON COLUMN {table_name}.create_date IS '该条记录更新时间';'''
        connection = psycopg2.connect(**cargs)
        connection.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)
        try:
            with connection.cursor() as cursor:
                cursor.execute(query)
        except Exception:
            connection.rollback()
            # Original swallowed the error and still logged success below.
            raise
        finally:
            connection.close()
        logger.debug(f"Table '{table_name}' exists in the database.")


def qhdm_2_tiles(cargs, qhdm):
    """Return the distinct map-tile codes whose geometry intersects the
    administrative region identified by ``qhdm``.

    Args:
        cargs: keyword arguments for ``psycopg2.connect``.
        qhdm: administrative-division code selecting a ``dt_sy`` shape.

    Returns:
        list: tile codes from ``china_tile`` intersecting that shape.

    Raises:
        psycopg2.Error: propagated on query failure (the original swallowed
        it and then crashed with UnboundLocalError on ``data``).
    """
    # `qhdm` is bound as a parameter — the original interpolated it into the
    # SQL string, which was injectable.
    query = """
            SELECT distinct china_tile.tile
            FROM china_tile
            JOIN dt_sy 
            on st_intersects(china_tile.geom,dt_sy.shape)
            WHERE dt_sy.qhdm = %s 
            """
    connection = psycopg2.connect(**cargs)
    connection.autocommit = True
    try:
        with connection.cursor() as cursor:
            cursor.execute(query, (qhdm,))
            rows = cursor.fetchall()
    finally:
        connection.close()
    return [row[0] for row in rows]


def split_periods(period):
    """Split a (start, end) date-string pair around "five days ago".

    Products older than five days are treated as "history" (queried once);
    newer ones as "current" (polled on a timer).

    Args:
        period: ('YYYY-MM-DD', 'YYYY-MM-DD') start/end date strings.

    Returns:
        (history_period, current_period): each a ('YYYY-MM-DD', 'YYYY-MM-DD')
        tuple, or None when that side of the split is empty. The split date
        appears as the shared boundary when both sides exist.

    Raises:
        ValueError: when the dates cannot be classified (e.g. start > end
        straddling the split date).
    """
    fmt = "%Y-%m-%d"
    start_str, end_str = period
    start_date = datetime.strptime(start_str, fmt)
    end_date = datetime.strptime(end_str, fmt)
    split_date = datetime.now() - timedelta(days=5)
    if start_date <= split_date <= end_date:
        history_period = (start_date.strftime(fmt), split_date.strftime(fmt))
        current_period = (split_date.strftime(fmt), end_date.strftime(fmt))
    elif end_date < split_date:
        history_period = (start_date.strftime(fmt), split_date.strftime(fmt))
        current_period = None
    elif split_date < start_date:
        history_period = None
        current_period = (split_date.strftime(fmt), end_date.strftime(fmt))
    else:
        # Original did `raise("...")`, which is a TypeError (a str is not an
        # exception); raise a proper ValueError with the same message.
        raise ValueError("输入日期有误！")
    return history_period, current_period


def get_request_args(tile, period, cloud):
    """Assemble an OData product-search request for one Sentinel-2 tile.

    Args:
        tile: Sentinel-2 tile identifier (e.g. '50TMK').
        period: ('YYYY-MM-DD', 'YYYY-MM-DD') start/end date strings.
        cloud: maximum cloud cover (exclusive upper bound).

    Returns:
        (tile, url, params): the tile echoed back, the catalogue endpoint,
        and the query-parameter dict for the request.
    """
    start, end = period
    url = "https://catalogue.dataspace.copernicus.eu/odata/v1/Products"
    filter_clauses = [
        f"Attributes/OData.CSC.StringAttribute/any(att:att/Name eq 'tileId' and att/OData.CSC.StringAttribute/Value eq '{tile}')",
        f"Attributes/OData.CSC.StringAttribute/any(att:att/Name eq 'processingLevel' and att/OData.CSC.StringAttribute/Value eq 'S2MSI1C')",  # S2MSI1C
        f"Attributes/OData.CSC.DoubleAttribute/any(att:att/Name eq 'cloudCover' and att/OData.CSC.DoubleAttribute/Value lt {cloud})",
        f"ContentDate/Start gt {start}T00:00:00.000Z",
        f"ContentDate/Start lt {end}T00:00:00.000Z",
    ]
    query_params = {
        '$filter': " and ".join(filter_clauses),
        '$orderby': "ContentDate/Start desc",
        '$count': 'True',
        '$expand': 'Attributes',  # Assets Attributes
    }
    return (tile, url, query_params)


def execute_histroy(tiles, historyp, cloud, loading_queue, logger):
    """Queue one search request per tile covering the historical period.

    No-op when ``historyp`` is None (nothing predates the split date).
    NOTE: the misspelled name ("histroy") is kept for caller compatibility.
    """
    if not historyp:
        return
    for tile in tiles:
        logger.info(f"loading:{tile} {historyp} {cloud}")
        loading_queue.put(get_request_args(tile, historyp, cloud))


def execute_current(tiles,currentp,cloud,interval,loading_queue,logger):
    """Poll the recent ("current") period on a timer.

    Every ``interval`` seconds, queues one search request per tile covering
    a window clamped to roughly the last five days, and keeps polling until
    five days past the period's end date. No-op when ``currentp`` is None.

    Args:
        tiles: iterable of Sentinel-2 tile identifiers.
        currentp: ('YYYY-MM-DD', 'YYYY-MM-DD') period or None.
        cloud: maximum cloud cover passed through to the request builder.
        interval: seconds to sleep between polling rounds (nominally 8 h).
        loading_queue: queue receiving (tile, url, params) request tuples.
        logger: logger for progress messages.
    """
    if currentp: # If a current period exists, run timed query/update/load tasks
        start_date, end_date = currentp
        start_date = datetime.strptime(start_date,"%Y-%m-%d")
        end_date = datetime.strptime(end_date,"%Y-%m-%d")
        # Start date is in the future: wait in one-hour steps until it arrives.
        while start_date > datetime.now():
            logger.info('开始日期在今天之后，等待1小时为周期')
            time.sleep(60*60)
        while datetime.now() <= end_date + timedelta(days=5): # keep polling until 5 days after the end date
            if datetime.now()-timedelta(days=5) > start_date: # if the start is more than 5 days old, clamp it to 5 days ago
                sdate = datetime.now()-timedelta(days=5) 
            else:
                sdate = start_date
            if end_date > datetime.now(): # if the end date is in the future, clamp it to tomorrow
                edate = datetime.now()+timedelta(days=1)
            else:
                edate = end_date
            cperiod = (sdate.strftime("%Y-%m-%d"),edate.strftime("%Y-%m-%d"))
            for tile in tiles:
                logger.info(f"loading:{tile} {cperiod} {cloud}")
                item = get_request_args(tile,cperiod,cloud)
                loading_queue.put(item)
            logger.info("开始以8小时为周期的休眠！")
            time.sleep(interval)  # sleep between rounds (8-hour cycle)
    return


def core(cargs, qhdm, period, cloud, interval, loading_queue, logger):
    """Top-level producer: resolve the region's tiles, then feed the loading
    queue — first the one-shot history requests, then the timed
    current-period requests — and finish with a None sentinel."""
    tiles = qhdm_2_tiles(cargs, qhdm)
    history_period, current_period = split_periods(period)
    execute_histroy(tiles, history_period, cloud, loading_queue, logger)
    execute_current(tiles, current_period, cloud, interval, loading_queue, logger)
    loading_queue.put(None)  # end-of-stream marker for consumers
    logger.info("加载结束！")


def main(cargs, qhdm, period, cloud, interval, redisr):
    """Like ``core`` but publishes search requests to Redis instead of an
    in-process queue.

    For each tile of region ``qhdm``, pushes JSON request descriptors
    ({"tile", "url", "params"}) onto the 'loader_searcher' list: once for
    the historical period, then repeatedly (every ``interval`` seconds) for
    the rolling recent window, and finally a JSON null end-of-stream
    sentinel.

    Args:
        cargs: keyword arguments for ``psycopg2.connect``.
        qhdm: administrative-division code selecting the region.
        period: ('YYYY-MM-DD', 'YYYY-MM-DD') overall date range.
        cloud: maximum cloud cover for the search.
        interval: seconds between polling rounds (nominally 8 h).
        redisr: Redis client exposing ``lpush``.
    """

    def _publish(tile, p):
        # Build the OData request and publish it as a JSON descriptor.
        t, url, params = get_request_args(tile, p, cloud)
        redisr.lpush('loader_searcher',
                     json.dumps({"tile": t, "url": url, "params": params}))

    tiles = qhdm_2_tiles(cargs, qhdm)
    historyp, currentp = split_periods(period)

    if historyp:  # history exists: query it once, immediately
        for tile in tiles:
            print(f"loading:{tile} {historyp} {cloud}")
            _publish(tile, historyp)

    if currentp:  # current period exists: timed polling
        start_date = datetime.strptime(currentp[0], "%Y-%m-%d")
        end_date = datetime.strptime(currentp[1], "%Y-%m-%d")
        # Start date is in the future: wait in one-hour steps.
        while start_date > datetime.now():
            print('开始日期在今天之后，等待1小时为周期')
            time.sleep(60*60)
        while datetime.now() <= end_date + timedelta(days=5):
            # Clamp the query window: start no older than 5 days ago,
            # end no later than tomorrow.
            sdate = max(start_date, datetime.now() - timedelta(days=5))
            edate = min(end_date, datetime.now() + timedelta(days=1))
            cperiod = (sdate.strftime("%Y-%m-%d"), edate.strftime("%Y-%m-%d"))
            for tile in tiles:
                print(f"loading:{tile} {cperiod} {cloud}")
                _publish(tile, cperiod)
            print("开始以8小时为周期的休眠！")
            time.sleep(interval)  # sleep between rounds (8-hour cycle)

    # The original re-pushed the last `item` here (a duplicate) and crashed
    # with UnboundLocalError when both periods were None. Push an explicit
    # JSON null as the end-of-stream sentinel, mirroring core()'s
    # loading_queue.put(None).
    redisr.lpush('loader_searcher', json.dumps(None))
    print("加载结束！")
    return
