from requests.packages.urllib3.util.retry import Retry
from psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT
from psycopg2.extras import execute_values
from requests.adapters import HTTPAdapter
from datetime import datetime
import requests
import psycopg2
import re
import os
import daemon
import redis
import json


def updata_table(cargs, data):
    """Upsert Sentinel-2 L1C product rows into the ``sentinel2_l1c`` table.

    Parameters
    ----------
    cargs : dict
        psycopg2 connection keyword arguments (host, dbname, user, ...).
    data : list[tuple]
        Rows of (name, id, tile, cloud, online, product_date, geom,
        create_date), matching the column list in the INSERT below.
    """
    if not data:
        # Nothing to write — skip the connection round-trip entirely.
        return
    connection = psycopg2.connect(**cargs)
    try:
        # Autocommit so the upsert is persisted without an explicit commit().
        # (The original also set connection.autocommit = True, which is
        # redundant with this call — one of the two is enough.)
        connection.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)
        query = """
            INSERT INTO sentinel2_l1c
            (name,id,tile,cloud,online,product_date,geom,create_date)
            VALUES %s
            ON CONFLICT (name)
            DO UPDATE SET
            online=excluded.online,
            id=excluded.id,
            tile=excluded.tile,
            cloud=excluded.cloud,
            product_date=excluded.product_date,
            geom=excluded.geom,
            create_date=excluded.create_date;
            """
        # Cursor as a context manager + finally-close: previously both the
        # cursor and the connection leaked if execute_values raised.
        with connection.cursor() as cursor:
            execute_values(cursor, query, data)
    finally:
        connection.close()
    return


def analysis_product(tile, product):
    """Flatten one OData product entry into a DB row and a download task.

    Parameters
    ----------
    tile : str
        Sentinel-2 tile identifier the product was searched under.
    product : dict
        One element of the OData response's "value" array; must contain
        Name, Id, Online, Footprint, Attributes and ContentDate.Start.

    Returns
    -------
    tuple
        ``(record, params)`` — ``record`` is the 8-tuple row for
        ``sentinel2_l1c``; ``params`` is ``[name, YYYYMMDD, id]`` when the
        product is online (downloadable), otherwise an empty list.
    """
    # Parse the acquisition timestamp ONCE; the original re-parsed the same
    # string a second time for the online branch.
    product_date = datetime.strptime(
        product['ContentDate']['Start'], "%Y-%m-%dT%H:%M:%S.%fZ")
    # Footprint looks like "geography'SRID=...;POLYGON(...)'" — keep only the
    # single-quoted WKT payload.
    wkt = re.search(r"'(.*?)'", product['Footprint']).group(1)
    # Pull the cloud-cover attribute without materializing a filtered list.
    cloud = next(
        attr["Value"] for attr in product['Attributes']
        if attr['Name'] == "cloudCover")
    name = product['Name'].replace(".SAFE", "")
    record = (
        name,
        product['Id'],
        tile,
        cloud,
        product['Online'],
        product_date,
        wkt,
        datetime.now())
    if product['Online']:  # only online products can be queued for download
        params = [name, product_date.strftime("%Y%m%d"), product['Id']]
    else:
        params = []
    return record, params


def main():
    """Daemon loop: pop search jobs from the ``loader_searcher`` redis list,
    query the OData API through a proxied, retrying session, upsert results
    into postgres and queue online products on ``searcher_downloader``.

    Queue item format (JSON): ``[tile, url, query_params, priority]``.
    This function never returns under normal operation.
    """
    with open('/data/jiabing/RS_CODE/Sentinel2download/config.json', "r") as file:
        config = json.load(file)
    agent = config["agent"]
    cargs = config["cargs"]
    rediscon = config["rediscon"]
    proxies = {
        'http': f"{agent['ip']}:{agent['port']}",
        'https': f"{agent['ip']}:{agent['port']}"}
    # Product Ids already processed in this daemon's lifetime.
    # BUGFIX: the original created this set but never added to it, so the
    # dedup check below could never fire.
    seen_ids = set()
    retries = Retry(
            total=10,
            connect=5,
            read=5,
            backoff_factor=0.5,
            raise_on_status=True,
            # NOTE(review): 104 is not an HTTP status code (it looks like
            # errno ECONNRESET) — confirm it is intentional here.
            status_forcelist=[104, 500, 502, 503, 504])
    adapter = HTTPAdapter(
        max_retries=retries,
        pool_connections=1,
        pool_maxsize=1)
    session = requests.Session()
    session.proxies.update(proxies)
    session.mount('http://', adapter)
    session.mount('https://', adapter)
    session.keep_alive = True
    pool = redis.ConnectionPool(**rediscon)
    redisr = redis.Redis(connection_pool=pool)
    while True:
        # Block forever until a search job arrives.
        _, element = redisr.brpop('loader_searcher', 0)
        tile, url, query, priority = json.loads(element.decode('utf-8'))
        print(f'{url}')
        try:
            with session.get(url, params=query) as response:
                if response.status_code != 200:
                    print(response.status_code, response.json())
                    continue
                result = response.json()
                nextlink = result.get("@odata.nextLink")
                if nextlink:
                    # BUGFIX: re-push FOUR fields so the unpack above keeps
                    # working (the original pushed [tile, nextlink, None],
                    # which raised ValueError on the next pop) and carry the
                    # job's priority forward instead of dropping it.
                    redisr.rpush('loader_searcher',
                                 json.dumps([tile, nextlink, None, priority]))
                records = []
                for product in result["value"]:
                    if product['Id'] in seen_ids:
                        continue
                    seen_ids.add(product['Id'])
                    record, task = analysis_product(tile, product)
                    records.append(record)
                    # BUGFIX: offline products return an empty task list —
                    # do not push empty jobs to the downloader queue.
                    if task:
                        if priority == "high":
                            redisr.rpush('searcher_downloader', json.dumps(task))
                        else:
                            redisr.lpush('searcher_downloader', json.dumps(task))
                if records:
                    updata_table(cargs, records)  # upsert this page's rows
        except requests.RequestException as e:
            print(e)


if __name__ == "__main__":
    # Detach from the terminal and run the search loop as a daemon,
    # redirecting stdout/stderr to dedicated log files.
    out_log = open(r"/data/logfile/SearchDaemon_out.log", "w")
    err_log = open(r"/data/logfile/SearchDaemon_err.log", "w")
    with daemon.DaemonContext(stdout=out_log, stderr=err_log):
        main()