# -*- coding: utf-8 -*-
# Author: GuoYouMing
import time
from os import system, environ
environ["PBR_VERSION"] = "4.0.2"
from glob import glob
from tendo import singleton
from utils.tools import *
from utils.config import core_count, cldas_grib2_path, logpath
from multiprocessing import Pool, cpu_count
from pygrib import open as pyopen
from utils.read_micaps import read_micaps_3
from os.path import getctime, dirname, basename
from utils.logger import Logger

# Monthly-rotating log file for this process. NOTE: `datetime` and `Logger`
# come from the wildcard / utils imports above.
bc_logger = Logger(filename=f'{logpath}/bias_correct_{datetime.now().year}-{datetime.now().month}.log', level='info',
                   when='D', back_count=30).logger
# Worker-pool size: 70% of available CPUs unless pinned via config (core_count != 0).
processCnt = int(cpu_count() * 0.7) if core_count == 0 else core_count


def get_re_cldas(tem_cls, ctime):
    """Read the latest CLDAS GRIB2 temperature field for *ctime* and, when
    ``revise_cldas`` is set, overwrite grid points with national station obs.

    Args:
        tem_cls: temperature category key — 'TMAX', 'TMIN' or 'TEM03'.
        ctime:   timestamp string, at least 10 chars ('YYYYMMDDHH...').

    Returns:
        2-D numpy array of temperatures in Kelvin with rows ordered
        south-to-north, or None when no usable GRIB file/message is found
        or the field fails the sanity check.
    """
    # Per-category GRIB filename fragment and GRIB message name.
    categorys = {'TMAX': ['_P_CLDAS_RT_CHN_0P05_DAY-MXT-', 'Maximum temperature'],
                 'TMIN': ['_P_CLDAS_RT_CHN_0P05_DAY-MNT-', 'Minimum temperature'],
                 'TEM03': ['_P_CLDAS_RT_CHN_0P05_HOR-TEM-', '2 metre temperature'],
                 }
    cldas_grb_files = glob(f"{cldas_grib2_path}/{ctime[:4]}/{ctime[:8]}/*{categorys[tem_cls][0]}*{ctime[8:10]}.GRB2")

    if not cldas_grb_files:
        bc_logger.error(
            f"NO CLDAS FILE:{cldas_grib2_path}/{ctime[:4]}/{ctime[:8]}/*{categorys[tem_cls][0]}*{ctime[8:10]}.GRB2")
        return None

    # Most recently issued file — issue time follows 'Z_NAFP_C_BABJ_' in the name.
    last_cldas_grb_file = sorted(cldas_grb_files, key=lambda fname: basename(fname).split('Z_NAFP_C_BABJ_')[1])[-1]

    grbs = pyopen(last_cldas_grb_file)
    try:  # BUGFIX: the GRIB handle was never closed (file-descriptor leak)
        grbs.seek(0)
        try:
            grb = grbs.select(name=categorys[tem_cls][1])[0]
        except ValueError:
            bc_logger.error(f"ERROR GRIB FILE: {last_cldas_grb_file}")
            return None

        area_data, lats, lons = grb.data(lat1=latmin, lat2=latmax, lon1=lonmin, lon2=lonmax)
        # Normalize row order to south-to-north so station indexing below is valid.
        if lats[0, 0] > lats[1, 0]:
            area_data = area_data[::-1, :]
            bc_logger.info("Latitude Switch !")

        # Filter out files with out-of-range temperatures (values are Kelvin;
        # Tem_max / Tem_min are Celsius bounds, hence the C_K offset).
        if (area_data.max() - C_K) > Tem_max or (area_data.min() - C_K) < Tem_min:
            bc_logger.error(
                f"ERROR GRIB DATA:{last_cldas_grb_file}\n{grb}\nTMAX={round(area_data.max())}, TMIN={round(area_data.min())}")
            return None
    finally:
        grbs.close()

    if revise_cldas:
        sfname = tem_cls[1:4] + '_24H' if tem_cls in ['TMAX', 'TMIN'] else 'TEM'
        station_file = f'{station_micaps_path}/micaps{ctime[:4]}/micaps{ctime[:6]}/SURFACE/TMP_{sfname}_NATIONAL/{ctime}0000.000'
        if not isfile(station_file):
            bc_logger.error(f"No Station File:{station_file}, {ctime} CLDAS DATA DOSE NOT REVISED!")

        else:
            station_data = read_micaps_3(station_file)

            if station_data is None:
                bc_logger.error(f"Station File Error:{station_file}")

            else:
                station_cnt = 0
                for i, sid in enumerate(station_data.ID):

                    if sid not in National_station_ids:
                        continue

                    Latitude = station_data.lat[i]
                    Longitude = station_data.lon[i]
                    TEM = station_data.Var0[i]
                    if not Tem_min < TEM < Tem_max:
                        bc_logger.error(
                            f"Station Point Data Error:STATION ID:{sid}, Tem:{TEM}, file:{basename(station_file)}")
                        # BUGFIX: previously the out-of-range value was still
                        # written into the grid despite being flagged as bad.
                        continue
                    STATION_TEM = TEM + C_K  # station obs are Celsius; grid is Kelvin

                    # Nearest grid cell (round-to-nearest via the +0.5 trick).
                    x_axis = int((Latitude - latmin) / grid + 0.5)
                    y_axis = int((Longitude - lonmin) / grid + 0.5)
                    # NOTE(review): x (lat index) is compared to area_width and
                    # y (lon index) to area_height — confirm width/height are
                    # defined that way in utils.config.
                    if x_axis >= area_width or y_axis >= area_height or x_axis < 0 or y_axis < 0:
                        continue

                    station_cnt += 1
                    area_data[x_axis, y_axis] = STATION_TEM

                bc_logger.info(f"{ctime} CLDAS GRIB DATA REVISED SUCCESS")
    return area_data


def bc(file):
    """Bias-correct one revised MICAPS-4 forecast file.

    Computes the mean CLDAS-minus-forecast difference over the last
    ``bc_days`` matching forecasts, applies it to the current field, writes
    the corrected grid to the bc output tree and archives the input file.

    Args:
        file: path to a revised forecast file named 'YYMMDDHH.FFF'
              (init time . forecast hour), inside a category subdirectory.
    """
    # Skip files created this very instant — presumably still being written
    # by the producer. NOTE(review): a 0.1 ms window seems too small to
    # guarantee that; confirm the intent.
    if time.time() - getctime(file) < 0.0001:
        return

    bc_logger.info(f"find new {basename(dirname(dirname(file)))} file: {file}")
    sctime = f"20{basename(file).split('.')[0]}"  # full init time 'YYYYMMDDHH'
    hstep = int(basename(file).split('.')[1])     # forecast hour
    T_cls = file.split('/')[-2]                   # temperature category (dir name)

    # Derive the backup / output / issue paths; the 12h tree has its own
    # parallel set of directories.
    if file.find(basename(revise_out12_path)) == -1:
        backup_path = file.replace(basename(revise_out_path), basename(revise_out_history_path))
        bcpath = file.replace(basename(revise_out_path), basename(bc_out_path))
        issue_folder = dirname(file).replace(basename(revise_out_path),
                                             f"{basename(revise_issue_path)}/{basename(revise_out_path)}")
    else:
        backup_path = file.replace(basename(revise_out12_path), basename(revise_out12_history_path))
        bcpath = file.replace(basename(revise_out12_path), basename(bc_out12_path))
        issue_folder = dirname(file).replace(basename(revise_out12_path),
                                             f"{basename(revise_issue_path)}/{basename(revise_out12_path)}")

    # Beyond the correction horizon: pass the file through uncorrected.
    if hstep > bc_hsteps:
        bc_logger.warning(f"{basename(file)} {hstep} > bc_hsteps({bc_hsteps}), No Bias Correct")
        system(f"cp  {file} {bcpath} && mv {file} {backup_path}")
        return

    try:
        head, revise_data = read_micaps_4(file, get_header=True)
    except ValueError:
        # BUGFIX: was bc_logger.error('ERROR DATA', file) — *file* was passed
        # as a %-format argument with no placeholder, breaking the log record.
        bc_logger.error(f"ERROR DATA {file}")
        # BUGFIX: ensure the issue directory exists before moving (the branch
        # below already did this; this one did not).
        if not isdir(issue_folder):
            makedirs(issue_folder)
        system(f"mv {file} {issue_folder}")
        return

    # Collect CLDAS-minus-forecast differences for matching past forecasts.
    d_list = []
    for i in range(bc_days, -1, -1):
        daystep = int(hstep // 24 + i)
        ltime = get_time(sctime, days=-daystep)

        for hc in ['08', '20']:  # the two daily issue hours
            lasttime = ltime[:8] + hc

            lastsfile = f"{dirname(backup_path)}/{lasttime[2:10]}.{str(hstep).rjust(3, '0')}"
            lastsdata = read_micaps_4(lastsfile)

            fsttime = get_time(lasttime, hours=hstep)
            # Only use verifications that are strictly in the past.
            if int(fsttime[:10]) >= int(sctime[:10]):
                continue
            cldas_path = pathjoin(cldas_npy_path, T_cls, fsttime[2:10] + '.npy')
            if not isfile(cldas_path):
                bc_logger.warning(f"No {cldas_path}")
                lastcldas = get_re_cldas(T_cls, fsttime)
            else:
                lastcldas = np.load(cldas_path)

            if not isinstance(lastsdata, np.ndarray):
                bc_logger.error(f"{T_cls} {basename(file)} 未匹配到 {basename(lastsfile)}")
            elif not isinstance(lastcldas, np.ndarray):
                bc_logger.error(f"NO {T_cls} {fsttime[:10]} CLDAS DATA")
            else:
                # Both fields available (the original had a fourth, unreachable
                # branch here — removed as dead code). Forecast is in Celsius,
                # CLDAS in Kelvin, hence the C_K offset.
                diff = lastcldas - lastsdata - C_K
                d_list.append(diff)

    # Require at least 75% of the expected samples; otherwise park the file.
    if len(d_list) < (bc_days * 0.75):
        if not isdir(issue_folder):
            makedirs(issue_folder)
        system(f"cp {file} {backup_path} && cp {file} {bcpath} && mv {file} {issue_folder}")
        bc_logger.error(f"{basename(file)}没有足够的偏差计算数据，备份至{issue_folder}")
        return

    d_mean = np.mean(d_list, axis=0)
    data = revise_data + d_mean

    # Clamp the corrected field into the plausible physical range.
    data[data > Tem_max] = Tem_max
    data[data < Tem_min] = Tem_min

    np.savetxt(bcpath, data, fmt=' %.2f', encoding='GBK', header=head.rstrip(), comments='')
    bc_logger.info(f"{file} bias correct succeed!")

    system(f"mv {file} {backup_path}")


if __name__ == '__main__':
    run = True
    try:
        # Ensure only one bias-correct process runs at a time. The bare
        # except is deliberate: tendo's SingleInstanceException may not
        # derive from Exception (and some versions exit instead), so a broad
        # catch is required here.
        me = singleton.SingleInstance()
        bc_logger.info("bias correct start...")

    except:  # noqa: E722 — see note above
        bc_logger.warning('bias correct process already  exist! ')
        run = False

    starttime = datetime.now()  # last time any work was found

    while run:
        for Tcls in Tcategorys:
            # Only the two input trees are scanned here; per-category output
            # directories are derived inside bc() from the file path.
            revise_out_dir = f"{revise_out_path}/{Tcls}"
            revise_out12_dir = f"{revise_out12_path}/{Tcls}"

            # Files named like '2YYMMDDHH.FFF' (8 digits . 3-char forecast hour).
            reout_files = sorted(glob(revise_out_dir + '/2' + '[0-9]' * 7 + '.???'))
            reout12_files = sorted(glob(revise_out12_dir + '/2' + '[0-9]' * 7 + '.???'))

            if len(reout_files) + len(reout12_files):
                pool = Pool(processes=processCnt)
                pool.map(bc, reout_files, chunksize=1)
                pool.map(bc, reout12_files, chunksize=1)
                pool.close()
                pool.join()

                starttime = datetime.now()  # work done — reset the idle timer

        # BUGFIX: was `.seconds`, which wraps at one day; total_seconds() is
        # the true elapsed idle time.
        if (datetime.now() - starttime).total_seconds() > process_duration:
            bc_logger.info(f"timeout,Idle {process_duration} seconds, bias correct exits")
            break

        # Avoid a hot busy-wait (100% CPU globbing) when no new files exist.
        time.sleep(1)