from requests.packages.urllib3.util.retry import Retry
from psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT
from psycopg2.extras import execute_values
from requests.adapters import HTTPAdapter
from datetime import datetime
import requests
import psycopg2
import queue
import json
import re
import os


def qhdm_2_tiles(cargs, qhdm):
    """Return the distinct satellite tile codes intersecting a region.

    Spatially joins ``china_tile`` against ``dt_sy`` (region boundary
    table) and returns every tile whose geometry intersects the region
    identified by administrative code ``qhdm``.

    Parameters
    ----------
    cargs : dict
        Keyword arguments for ``psycopg2.connect`` (host, dbname, ...).
    qhdm : str
        Administrative division code, matched against ``dt_sy.qhdm``.

    Returns
    -------
    list
        Tile codes (first column of each row); empty if the query fails.
    """
    connection = psycopg2.connect(**cargs)
    connection.autocommit = True
    # Parameterized placeholder instead of f-string interpolation:
    # avoids SQL injection and quoting bugs for the qhdm value.
    query = """
            SELECT DISTINCT china_tile.tile
            FROM china_tile
            JOIN dt_sy
              ON st_intersects(china_tile.geom, dt_sy.shape)
            WHERE dt_sy.qhdm = %s
            """
    data = []  # fallback: the original raised NameError here on query failure
    try:
        cursor = connection.cursor()
        try:
            cursor.execute(query, (qhdm,))
            data = cursor.fetchall()
        except Exception as e:
            connection.rollback()
            print(e)  # best-effort: report instead of silently swallowing
        finally:
            cursor.close()
    finally:
        # Always release the connection, even if cursor creation fails.
        connection.close()
    return [row[0] for row in data]


def data_2_table(cargs, data):
    """Upsert Sentinel-2 L1C product records into ``sentinel2_l1c``.

    Each record is a tuple matching the column list below (as produced
    by ``product_2_records``). On a ``name`` conflict every other
    column is overwritten with the incoming value.

    Parameters
    ----------
    cargs : dict
        Keyword arguments for ``psycopg2.connect``.
    data : list[tuple]
        Rows of (name, id, tile, cloud, online, product_date, geom,
        create_date).
    """
    if not data:
        return  # nothing to insert; skip opening a connection
    connection = psycopg2.connect(**cargs)
    # autocommit alone is sufficient; the original also called
    # set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT), which is redundant.
    connection.autocommit = True
    # Plain string (the original used an f-string with no placeholders).
    query = """
        INSERT INTO sentinel2_l1c
        (name,id,tile,cloud,online,product_date,geom,create_date) 
        VALUES %s
        ON CONFLICT (name)
        DO UPDATE SET 
        online=excluded.online,
        id=excluded.id,
        tile=excluded.tile,
        cloud=excluded.cloud,
        product_date=excluded.product_date,
        geom=excluded.geom,
        create_date=excluded.create_date;
        """
    try:
        cursor = connection.cursor()
        try:
            execute_values(cursor, query, data)
        finally:
            cursor.close()
    finally:
        # Close even when execute_values raises (the original leaked here).
        connection.close()
    return


def product_2_records(tile, product):
    """Flatten one OData product payload into a database record tuple.

    Parameters
    ----------
    tile : str
        Tile code the product was queried for (stored alongside it).
    product : dict
        One entry of the catalogue response's ``value`` list.

    Returns
    -------
    tuple
        (name, id, tile, cloud, online, product_date, wkt, create_date)
        in the column order expected by ``data_2_table``.
    """
    acquired = datetime.strptime(
        product['ContentDate']['Start'], "%Y-%m-%dT%H:%M:%S.%fZ")
    # Footprint arrives as e.g. "geography'SRID=...;POLYGON(...)'";
    # keep only the single-quoted WKT payload.
    footprint_wkt = re.search(r"'(.*?)'", product['Footprint']).group(1)
    cloud_cover = [attr["Value"] for attr in product['Attributes']
                   if attr['Name'] == "cloudCover"][0]
    return (
        product['Name'].replace(".SAFE", ""),
        product['Id'],
        tile,
        cloud_cover,
        product['Online'],
        acquired,
        footprint_wkt,
        datetime.now(),  # create_date: when this record was ingested
    )


def tile_2_args(tile, start, end, cloud):
    """Build the OData query parameters for one tile search.

    Parameters
    ----------
    tile : str
        Sentinel-2 tile id (matched via the tileId string attribute).
    start, end : str
        ISO dates (YYYY-MM-DD) bounding ContentDate/Start.
    cloud : number
        Exclusive upper bound on cloudCover.

    Returns
    -------
    dict
        requests-style params for the catalogue Products endpoint.
    """
    def string_clause(name, value):
        # One string-attribute equality predicate of the OData filter.
        return (f"Attributes/OData.CSC.StringAttribute/any(att:att/Name eq '{name}'"
                f" and att/OData.CSC.StringAttribute/Value eq '{value}')")

    clauses = [
        string_clause('tileId', tile),
        string_clause('processingLevel', 'S2MSI1C'),
        (f"Attributes/OData.CSC.DoubleAttribute/any(att:att/Name eq 'cloudCover'"
         f" and att/OData.CSC.DoubleAttribute/Value lt {cloud})"),
        f"ContentDate/Start gt {start}T00:00:00.000Z",
        f"ContentDate/Start lt {end}T00:00:00.000Z",
    ]
    return {
        '$filter': " and ".join(clauses),
        '$orderby': "ContentDate/Start desc",
        '$count': 'True',
        '$expand': 'Attributes',
    }


def core(session, task_queue, cargs):
    """Drain the task queue, querying the catalogue and upserting results.

    Each task is ``(tile, url, params)``. A paginated response exposes
    "@odata.nextLink"; that URL is re-queued with ``params=None`` since
    the next-link already carries the full query string.

    Parameters
    ----------
    session : requests.Session
        Pre-configured session (proxies, retries) used for all requests.
    task_queue : queue.Queue
        Queue of (tile, url, params) work items; consumed until empty.
    cargs : dict
        psycopg2 connection arguments, forwarded to data_2_table.
    """
    while not task_queue.empty():
        tile, url, params = task_queue.get()
        try:
            with session.get(url, params=params) as response:
                if response.status_code == 200:
                    # Parse the body only after a success status: the
                    # original called response.json() unconditionally,
                    # so a non-JSON error body raised JSONDecodeError,
                    # which the RequestException handler does not catch.
                    result = response.json()
                    nextlink = result.get("@odata.nextLink")
                    if nextlink:
                        task_queue.put((tile, nextlink, None))
                    records = [product_2_records(tile, product)
                               for product in result["value"]]
                    data_2_table(cargs, records)
                else:
                    # Report and drop the failed task; use .text so an
                    # HTML error page cannot raise while being logged.
                    print(response.status_code, response.text)
        except requests.RequestException as e:
            print(e)  # retries exhausted / connection error: skip this task
    return


def main(qhdm, sdate, edate, cloud):
    """Sync Sentinel-2 L1C catalogue entries for one region into the DB.

    Looks up the tiles covering region ``qhdm``, queries the Copernicus
    Data Space catalogue for each tile, and upserts the products found.

    Parameters
    ----------
    qhdm : str
        Administrative division code (matched against ``dt_sy.qhdm``).
    sdate, edate : str
        ISO dates (YYYY-MM-DD) bounding ContentDate/Start.
    cloud : number
        Exclusive upper bound on cloud cover percentage.
    """
    url = "https://catalogue.dataspace.copernicus.eu/odata/v1/Products"
    # Config supplies DB connection args ("cargs") and proxy ("agent").
    with open('./config.json', "r", encoding="utf-8") as filehandle:
        config = json.load(filehandle)  # renamed: the original reused `params`
    cargs = config["cargs"]
    agent = config["agent"]
    proxy = f"{agent['ip']}:{agent['port']}"
    proxies = {'http': proxy, 'https': proxy}
    retries = Retry(
        total=10,
        connect=5,
        read=5,
        backoff_factor=0.5,
        raise_on_status=True,
        # NOTE(review): 104 is not an HTTP status code (it looks like
        # errno ECONNRESET); it can never match a response status.
        status_forcelist=[104, 500, 502, 503, 504])
    adapter = HTTPAdapter(
        max_retries=retries,
        pool_connections=1,
        pool_maxsize=1)
    session = requests.Session()
    session.proxies.update(proxies)
    session.mount('http://', adapter)
    session.mount('https://', adapter)
    # requests keeps connections alive by default; the original's
    # `session.keep_alive = True` set a nonexistent attribute (no-op).
    try:
        tiles = qhdm_2_tiles(cargs, qhdm)
        task_queue = queue.Queue()
        for tile in tiles:
            task_queue.put((tile, url, tile_2_args(tile, sdate, edate, cloud)))
        core(session, task_queue, cargs)
    finally:
        # Close the session even if the DB lookup or worker loop raises.
        session.close()
    return


if __name__ == "__main__":
    # Example run: region code 41, Feb-Apr 2024, cloud cover below 100%.
    region_code = '41'
    start_date = '2024-02-01'
    end_date = '2024-04-01'
    max_cloud = 100
    main(region_code, start_date, end_date, max_cloud)