# -*- coding: utf-8 -*-
import numpy as np
import os
import wrf_output
import gauge
import matplotlib.pyplot as plt


def decumulation(seq):
    """Convert an accumulated series into per-step increments.

    Parameters
    ----------
    seq : sequence
        Cumulative values (e.g. accumulated rainfall per output time).
        Elements may be scalars or numpy arrays.

    Returns
    -------
    list
        Same length as ``seq``: element 0 is ``seq[0]`` (the accumulation
        from an implicit zero start) and element i is ``seq[i] - seq[i-1]``.
        An empty input returns an empty list (the original raised
        IndexError on ``seq[0]``).
    """
    if len(seq) == 0:
        return []
    result = [seq[i + 1] - seq[i] for i in range(len(seq) - 1)]
    result.insert(0, seq[0])
    return result


def enlarge_xy_label(size=20):
    """Enlarge the tick labels of the current matplotlib axes.

    Parameters
    ----------
    size : int, optional
        Font size applied to every x- and y-axis tick label.  Defaults to
        20, the value previously hard-coded, so existing callers are
        unaffected.
    """
    axes = plt.gca()
    for x_label in axes.xaxis.get_ticklabels():
        x_label.set_fontsize(size)
    for y_label in axes.yaxis.get_ticklabels():
        y_label.set_fontsize(size)


def get_sim(dat_path, grid_spacing, domain, start_time, mps, cus, pbls):
    """Collect hourly gauge-averaged rainfall for every existing run.

    Parameters
    ----------
    dat_path : str
        Directory holding one sub-directory per physics combination.
    grid_spacing : str
        Grid-spacing token ('139', '51545' or '103090') selecting the
        gauge row/column CSV.
    domain : str
        WRF domain number used in the wrfout file name (e.g. '03').
    start_time : str
        Start timestamp used in the wrfout file name.
    mps, cus, pbls : list of str
        Microphysics, cumulus and PBL scheme identifiers to combine.

    Returns
    -------
    (simulations, labels) : (list of np.ndarray, list of str)
        ``simulations[i]`` is the hourly rainfall averaged over gauges
        (axis=1) for the run labelled ``labels[i]`` as 'mp-cu-pbl'.
        Combinations without an existing run directory are skipped.
    """
    # Gauge row/column table per grid spacing; unknown spacings fall back
    # to '' exactly as the original if/elif chain did.
    gauge_locs = {
        '139': r'F:/research/rainfall_estimation/dat/Gauge/Gauge_row_col_1.csv',
        '51545': r'F:/research/rainfall_estimation/dat/Gauge/Gauge_row_col_5.csv',
        '103090': r'F:/research/rainfall_estimation/dat/Gauge/Gauge_row_col_10.csv',
    }
    gauge_loc = gauge_locs.get(grid_spacing, '')

    simulations = []
    labels = []
    for i in mps:
        for j in cus:
            for k in pbls:
                mp_cu_pbl = i + j + k
                run_dir = dat_path + '/' + mp_cu_pbl
                if not os.path.exists(run_dir):
                    continue  # this physics combination was not simulated
                labels.append(i + '-' + j + '-' + k)
                input_path = run_dir + '/wrfout_d' + domain + '_' + start_time
                # Accumulated rainfall per gauge, de-accumulated to hourly
                # values, then averaged over the gauges (axis=1).
                all_gauges = wrf_output.GetAll(input_path, gauge_loc)
                hourly_simulation = decumulation(all_gauges)
                simulations.append(np.mean(hourly_simulation, axis=1))

    return simulations, labels


def compute_dispersion(simulations):
    """Return the ensemble dispersion statistic.

    Builds the min/max envelope of the member series, takes the points at
    5% and 95% of that envelope, and averages their difference over the
    time steps whose envelope midpoint is positive (non-positive midpoints
    are masked out).
    """
    lower = np.min(simulations, axis=0)
    upper = np.max(simulations, axis=0)
    spread = upper - lower
    # 5th/95th points of the linear min-max band.
    q95 = (lower + spread * 0.95).astype(np.float64)
    q05 = (lower + spread * 0.05).astype(np.float64)
    # Only score time steps where the band midpoint exceeds zero.
    invalid = np.mean(np.array([lower, upper]), axis=0) <= 0
    band_width = np.ma.array(q95, mask=invalid) - np.ma.array(q05, mask=invalid)
    return np.mean(band_width, axis=0)



    # get every mp_cu group wrf_output and extract the center value
    mps = ['2', '6', '7']
    cus = ['0', '1', '2', '7']
    pbls = ['2', '6', '9']
    split_path = dat_path.split('/')
    gridSpacing = split_path[-1]
    gauge_loc = ''
    if gridSpacing == '139':
        gauge_loc = r'F:/research/rainfall_estimation/dat/Gauge' \
                    r'/Gauge_row_col_1.csv'
    elif gridSpacing == '51545':
        gauge_loc = r'F:/research/rainfall_estimation/dat/Gauge' \
                    r'/Gauge_row_col_5.csv'
    elif gridSpacing == '103090':
        gauge_loc = r'F:/research/rainfall_estimation/dat/Gauge' \
                    r'/Gauge_row_col_10.csv'
    simulations = []
    for i in mps:
        for j in cus:
            for k in pbls:
                mp_cu_pbl = i + j + k
                if os.path.exists(dat_path + '/' + mp_cu_pbl):
                    input_path = dat_path + '/' + mp_cu_pbl + \
                        '/wrfout_d' + domain + '_' + start_time
                    all_gauges = wrf_output.GetAll(input_path, gauge_loc)
                    all_gauges = decumulation(all_gauges)
                    hourly_average = np.mean(all_gauges, axis=1)
                    simulations.append(hourly_average)
    simulations = np.asarray(simulations)
    min = np.min(simulations, axis=0)
    max = np.max(simulations, axis=0)
    band = np.array([min, max])
    band_50 = np.mean(band, axis=0)
    # get the center gauge value
    gauge50 = gauge.Get50GaugeMean(start_time, end_time)

    # calculate dispersion statistic when ensemble mean larger than 0
    band_mask = (band_50 <= 0)
    band_95 = band[0] + (band[1] - band[0]) * 0.95
    band_95 = band_95.astype(np.float64)
    band_5 = band[0] + (band[1] - band[0]) * 0.05
    band_5 = band_5.astype(np.float64)
    band_95_filtered = np.ma.array(band_95, mask=band_mask)
    band_5_filtered = np.ma.array(band_5, mask=band_mask)
    # dispersion statistic
    band_filtered = band_95_filtered - band_5_filtered
    dispersion_d = np.mean(band_filtered, axis=0)

    mae = np.mean(np.abs((band_50 - gauge50.values)))
    me = np.mean((band_50 - gauge50.values))
    mse = np.mean(np.power((band_50 - gauge50.values), 2))

    # output statistic result
    all_statistic = np.array([dispersion_d, me, mae, mse])
    # all_statistic = all_statistic.T
    return all_statistic


def get_statistic(dat_path, domain, start_time, end_time):
    """Compute dispersion and error statistics for one WRF ensemble.

    Parameters
    ----------
    dat_path : str
        Ensemble directory; its last path component is the grid-spacing
        token used to select the gauge CSV in ``get_sim``.
    domain : str
        WRF domain number in the wrfout file name.
    start_time, end_time : str
        Event window; ``end_time`` is only passed to the gauge lookup.

    Returns
    -------
    np.ndarray
        ``[dispersion, mp_dispersion, cu_dispersion, pbl_dispersion,
        me, mae, mse]``.
    """
    # Physics options to combine: microphysics, cumulus and PBL schemes.
    mps = ['2', '6', '7']
    cus = ['0', '1', '2', '7']
    pbls = ['2', '6', '9']
    grid_spacing = dat_path.split('/')[-1]
    # Full ensemble: every available mp/cu/pbl combination.
    simulations, labels = get_sim(
        dat_path, grid_spacing, domain, start_time, mps, cus, pbls)
    simulations = np.asarray(simulations)
    dispersion = compute_dispersion(simulations)

    gauges_mean = gauge.Get50GaugeMean(start_time, end_time)
    # Mean absolute deviation of each member from the gauge mean; the
    # best member anchors the single-factor sub-ensembles below.
    deviations = [np.mean(np.abs(result - gauges_mean))
                  for result in simulations]
    min_mp, min_cu, min_pbl = labels[np.argmin(deviations)].split('-')
    # Vary one physics factor at a time around the best member to
    # attribute dispersion to mp, cu and pbl separately.
    mp_simulations, _ = get_sim(
        dat_path, grid_spacing, domain, start_time, mps, [min_cu], [min_pbl])
    mp_dispersion = compute_dispersion(np.asarray(mp_simulations))
    cu_simulations, _ = get_sim(
        dat_path, grid_spacing, domain, start_time, [min_mp], cus, [min_pbl])
    cu_dispersion = compute_dispersion(np.asarray(cu_simulations))
    pbl_simulations, _ = get_sim(
        dat_path, grid_spacing, domain, start_time, [min_mp], [min_cu], pbls)
    pbl_dispersion = compute_dispersion(np.asarray(pbl_simulations))

    # Midpoint of the ensemble min/max band, compared against gauges.
    band_mean = np.mean(
        np.array([np.min(simulations, axis=0),
                  np.max(simulations, axis=0)]), axis=0)
    error = band_mean - gauges_mean.values
    mae = np.mean(np.abs(error))
    me = np.mean(error)
    mse = np.mean(np.power(error, 2))

    # output statistic result
    return np.array([dispersion, mp_dispersion, cu_dispersion, pbl_dispersion,
                     me, mae, mse])


def get_deviation(dat_path, domain, start_time, end_time, event):
    """Print the statistic vector for each grid-spacing scenario.

    ``event`` is accepted for interface compatibility but not used here.
    """
    for scenario in ('139', '51545', '103090'):
        stat = get_statistic(dat_path + '/' + scenario,
                             domain, start_time, end_time)
        print(scenario + " dispersion, mp_dispersion, cu_dispersion, pbl_dispersion, me, mae, mse:")
        print(stat)


def main():
    """Run the dispersion/error analysis for one rainfall event.

    The event configurations that were previously kept as blocks of
    commented-out assignments (switched by editing the code) are collected
    in ``events``; change ``event`` below to analyse a different one.
    Each entry is ``(dat_path, domain, start_time, end_time)``.
    """
    events = {
        'R1': (r'H:/research/rainfall_estimation/wrf_output/2008/01/1700',
               r'03', r'2008-01-17_00_00_00', r'2008-01-19_12_00_00'),
        'R2': (r'H:/research/rainfall_estimation/wrf_output/2008/01/1912',
               r'03', r'2008-01-19_12_00_00', r'2008-01-22_00_00_00'),
        'R3': (r'H:/research/rainfall_estimation/wrf_output/2008/08/1718',
               r'03', r'2008-08-17_18_00_00', r'2008-08-20_00_00_00'),
        'R4': (r'H:/research/rainfall_estimation/wrf_output/2008/09/0500',
               r'03', r'2008-09-05_00_00_00', r'2008-09-07_00_00_00'),
        'R5': (r'H:/research/rainfall_estimation/wrf_output/2008/09/2900',
               r'03', r'2008-09-29_00_00_00', r'2008-10-02_06_00_00'),
        'R6': (r'H:/research/rainfall_estimation/wrf_output/2008/10/2506',
               r'03', r'2008-10-25_06_00_00', r'2008-10-26_06_00_00'),
        'R7': (r'H:/research/rainfall_estimation/wrf_output/2008/11/0900',
               r'03', r'2008-11-09_00_00_00', r'2008-11-10_06_00_00'),
        'R8': (r'H:/research/rainfall_estimation/wrf_output/2008/12/0400',
               r'03', r'2008-12-04_00_00_00', r'2008-12-06_00_00_00'),
    }
    # R8 was the active (uncommented) configuration in the original.
    event = 'R8'
    dat_path, domain, start_time, end_time = events[event]
    get_deviation(dat_path, domain, start_time, end_time, event)


# Script entry point: run the analysis for the event configured in main().
if __name__ == '__main__':
    main()
