#!/usr/bin/env python3

import argparse
import numpy as np
import pendulum
import pandas as pd
import json
from jinja2 import Template
import os
import sys
script_root = os.path.dirname(os.path.realpath(__file__))
sys.path.append(f'{script_root}/../utils')
from utils import cli, parse_config, edit_file, check_files, run, has_key

def _cnt_obtype_stats(one_type_df):
	'''
	Build the CNT statistics dict for one observation type (ADPUPA/ADPSFC).

	For each forecast variable the dict holds its units, the vertical
	levels (sorted ascending) and the RMSE/ME/ME2/MAE/PR_CORR profiles.
	PR_CORR may be NaN in the MET output; NaN is not valid JSON, so it is
	replaced by the sentinel -99999 before serialization.
	'''
	stats = {}
	stats['var_lev_type'] = one_type_df['var_lev_type'].values[0]
	var_list     = list(set(one_type_df['FCST_VAR']))
	stats['var'] = var_list
	for i_var in var_list:
		var_df = one_type_df[ one_type_df['FCST_VAR'] == i_var ].sort_values(by='var_lev')
		stats[i_var] = {
			'units'  : var_df['FCST_UNITS'].values[0],
			'lev'    : var_df['var_lev'].values.tolist(),
			'rmse'   : np.around(var_df['RMSE'].values, decimals=4).tolist(),
			'me'     : np.around(var_df['ME'].values, decimals=4).tolist(),
			'me2'    : np.around(var_df['ME2'].values, decimals=4).tolist(),
			'mae'    : np.around(var_df['MAE'].values, decimals=4).tolist(),
			'pr_corr': np.around(var_df['PR_CORR'].fillna(-99999).values, decimals=4).tolist(),
		}
	return stats

def write_json(init_time, fcst_hour, met_work_dir, json_file_path):
	'''
	Collect MET point_stat text output (CNT and MPR line types) for one
	forecast hour and write it to a single JSON file.

	Parameters:
		init_time:      forecast initialization time (pendulum-like object
		                providing add() and format())
		fcst_hour:      forecast lead time in hours
		met_work_dir:   directory containing the point_stat *_cnt.txt and
		                *_mpr.txt files
		json_file_path: path of the JSON file to write

	Returns None; aborts early (without writing JSON) when an existing
	stat file turns out to be empty.

	point_stat line types:
		CNT: Continuous Statistics
		    -> PR_CORR:Pearson correlation coefficient
		       SP_CORR:Spearman's rank correlation coefficient
		       KT_CORR:Kendall's tau statistic
		       ME:     Mean error (F-O)
		       MAE:    Mean absolute error
		       RMSE:   Root mean squared error
		       ME2:    The square of the mean error (bias)
		VCNT: Vector Continuous Statistics
		    -> FBAR:   Mean value of forecast wind speed
		       OBAR:   Mean value of observed wind speed
		       FS_RMS: Root mean square forecast wind speed
		       OS_RMS: Root mean square observed wind speed
		VAL1L2: Vector Anomaly Partial Sums
		    -> UVFOABAR: Mean((uf-uc)*(uo-uc)+(vf-vc)*(vo-vc))
		       UVFFABAR: Mean((uf-uc)²+(vf-vc)²)
		       UVOOABAR: Mean((uo-uc)²+(vo-vc)²)
		MPR: Matched Pair
		    -> OBS_SID: Station Identifier of observation
		       OBS_LAT: Latitude of the observation in degrees north
		       OBS_LON: Longitude of the observation in degrees east
		       OBS_LVL: Pressure level of the observation in hPa or accumulation interval in hours
		       OBS_ELV: Elevation of the observation in meters above sea level
		       FCST:    Forecast value interpolated to the observation location
		       OBS:     Observation value

	Example file names (24 h lead, valid 2021-08-04 00 UTC):
		point_stat_240000L_20210804_000000V_cnt.txt
		point_stat_240000L_20210804_000000V_mpr.txt
	'''
	valid_time = init_time.add(hours=fcst_hour)
	post_dict  = {
		'init_time': init_time.format('YYYYMMDDHHmmSS'),
		'valid_time': valid_time.format('YYYYMMDDHHmmSS'),
		'fcst_hour': fcst_hour
	}
	# Shared stem of the point_stat output file names.
	file_stem = f'point_stat_{fcst_hour:02}0000L_{valid_time.format("YYYYMMDD")}_{valid_time.format("HHmmSS")}V'

	# CNT: continuous statistics per variable and level.
	cnt_path = os.path.join(met_work_dir, f'{file_stem}_cnt.txt')
	cnt_dict = {}
	if os.path.exists(cnt_path):
		# sep=r'\s+' is the non-deprecated equivalent of delim_whitespace=True.
		df = pd.read_csv(cnt_path, sep=r'\s+')
		if len(df) < 2:
			print(f'[Warning]: {cnt_path} is empty')
			return
		# FCST_LEV looks like e.g. 'P850': first char is the level type,
		# the rest is the numeric level value.
		df['var_lev_type'] = df['FCST_LEV'].apply(lambda x: x[0])
		df['var_lev']      = df['FCST_LEV'].apply(lambda x: x[1:]).astype('f8')
		for obtype in ('ADPUPA', 'ADPSFC'):
			one_type_df = df[ df['OBTYPE'] == obtype ]
			if len(one_type_df) > 0:
				cnt_dict[obtype] = _cnt_obtype_stats(one_type_df)
	post_dict['CNT'] = cnt_dict

	# MPR: matched forecast/observation pairs.
	mpr_path = os.path.join(met_work_dir, f'{file_stem}_mpr.txt')
	mpr_dict = {}
	if os.path.exists(mpr_path):
		# BUG FIX: the file must be read before its length can be checked;
		# previously the CNT DataFrame (possibly undefined when no CNT file
		# existed) was tested here.
		df = pd.read_csv(mpr_path, sep=r'\s+')
		if len(df) < 2:
			print(f'[Warning]: {mpr_path} is empty')
			return
		df['var_lev_type'] = df['FCST_LEV'].apply(lambda x: x[0])
		# Levels stay strings here: they double as JSON keys below.
		df['var_lev']      = df['FCST_LEV'].apply(lambda x: x[1:])

		# ADPUPA: upper-air pairs, grouped per variable and per level.
		one_type_df = df[ df['OBTYPE'] == 'ADPUPA' ]
		if len(one_type_df) > 0:
			upa_dict = {}
			upa_dict['var_lev_type'] = one_type_df['var_lev_type'].values[0]
			var_list                 = list(set(one_type_df['FCST_VAR']))
			upa_dict['var']          = var_list
			for i_var in var_list:
				var_df = one_type_df[ one_type_df['FCST_VAR'] == i_var ]
				# Sort levels numerically but keep them as strings.
				levs   = [str(l) for l in sorted({int(l) for l in var_df['var_lev']})]
				upa_dict[i_var]         = {}
				upa_dict[i_var]['levs'] = levs
				for i_lev in levs:
					var_lev_df = var_df[ var_df['var_lev'] == i_lev ].sort_values(by='FCST')
					upa_dict[i_var][i_lev] = {
						'units'     : var_lev_df['FCST_UNITS'].values[0],
						'obs_lon'   : np.around(var_lev_df['OBS_LON'].values, decimals=4).tolist(),
						'obs_lat'   : np.around(var_lev_df['OBS_LAT'].values, decimals=4).tolist(),
						'obs_value' : np.around(var_lev_df['OBS'].values, decimals=4).tolist(),
						'fcst_value': np.around(var_lev_df['FCST'].values, decimals=4).tolist(),
					}
			mpr_dict['ADPUPA'] = upa_dict
		# ADPSFC: surface pairs, no per-level grouping.
		one_type_df = df[ df['OBTYPE'] == 'ADPSFC' ]
		if len(one_type_df) > 0:
			sfc_dict = {}
			sfc_dict['var_lev_type'] = one_type_df['var_lev_type'].values[0]
			var_list                 = list(set(one_type_df['FCST_VAR']))
			sfc_dict['var']          = var_list
			for i_var in var_list:
				var_df = one_type_df[ one_type_df['FCST_VAR'] == i_var ].sort_values(by='FCST')
				sfc_dict[i_var] = {
					'units'     : var_df['FCST_UNITS'].values[0],
					'lev'       : var_df['var_lev'].values.tolist(),
					'obs_lon'   : np.around(var_df['OBS_LON'].values, decimals=4).tolist(),
					'obs_lat'   : np.around(var_df['OBS_LAT'].values, decimals=4).tolist(),
					'obs_value' : np.around(var_df['OBS'].values, decimals=4).tolist(),
					'fcst_value': np.around(var_df['FCST'].values, decimals=4).tolist(),
				}
			mpr_dict['ADPSFC'] = sfc_dict

	post_dict['MPR'] = mpr_dict

	with open(json_file_path, 'w') as f:
		json.dump(post_dict, f)

def run_met(work_root, met_root, config, args, tag=None):
	'''
	Run the MET point_stat verification for one experiment.

	Steps:
	1. Check that the UPP post-processing directory exists.
	2. Prepare the netCDF observation file ob.nc, either by linking a
	   ready-made MET netCDF file, converting LITTLE_R data with ascii2nc,
	   or linking/converting GDAS PREPBUFR data with pb2nc.
	3. Run point_stat for every configured domain and post-process its
	   text output into a JSON file with write_json().

	Parameters:
		work_root: root of the experiment working directory
		met_root:  MET installation root (contains bin/)
		config:    parsed configuration dict (custom/domains sections)
		args:      command-line arguments; force, littler_root and
		           prepbufr_root are used here
		tag:       optional sub-directory under work_root (default None)

	Side effects: changes into met_work_dir while running and restores the
	original working directory before returning.
	'''
	start_time = config['custom']['start_time']
	end_time   = config['custom']['end_time']
	fcst_hours = config['custom']['forecast_hours']

	if tag:
		upp_work_dir = f'{work_root}/{tag}/upp'
	else:
		upp_work_dir = f'{work_root}/upp'
	if not os.path.isdir(upp_work_dir): cli.error('UPP is not run successfully!')

	if tag:
		met_work_dir = f'{work_root}/{tag}/met'
	else:
		met_work_dir = f'{work_root}/met'
	if not os.path.isdir(met_work_dir): os.makedirs(met_work_dir)
	# Remember the caller's working directory; restored at the end.
	old_dir = os.getcwd()
	os.chdir(met_work_dir)

	cli.stage('Prepare observation file for MET.')

	if has_key(config['custom'], ['met', 'obs_nc']):
		# Case 1: a ready-made MET netCDF observation file is configured.
		cli.notice('Find MET netCDF observation files.')
		dir_pattern  = config['custom']['met']['obs_nc']['dir_pattern']
		file_pattern = config['custom']['met']['obs_nc']['file_pattern']
		obs_dir    = Template(dir_pattern).render(time=end_time)
		obs_file   = Template(file_pattern).render(time=end_time)
		met_obs_nc = f'{obs_dir}/{obs_file}'
		if not os.path.isfile(met_obs_nc):
			cli.error(f'NetCDF observation {met_obs_nc} does not exist!')
		run(f'ln -sf {met_obs_nc} ob.nc')
	else:
		expected_files = ['ob.nc']
		if not check_files(expected_files) or args.force:
			if args.littler_root or has_key(config, ('custom', 'littler')):
				# Case 2: convert LITTLE_R observations with ascii2nc.
				if 'littler' in config['custom']:
					dir_pattern = config['custom']['littler']['dir_pattern']
					# file_pattern may be a plain string or a dict keyed by
					# observation kind; only 'conv' is used here.
					littler_file_pattern = config['custom']['littler']['file_pattern']
					if isinstance(littler_file_pattern, str):
						file_pattern = littler_file_pattern
					elif isinstance(littler_file_pattern, dict) and has_key(littler_file_pattern, 'conv'):
						file_pattern = littler_file_pattern['conv']
					else:
						cli.error('Unable to get LITTLE_R file_pattern in config object!')
					obs_dir  = Template(dir_pattern).render(time=end_time)
					obs_file = Template(file_pattern).render(time=end_time)
					if not os.path.isfile(f'{args.littler_root}/{obs_dir}/{obs_file}'):
						cli.error(f'Observation {args.littler_root}/{obs_dir}/{obs_file} does not exist!')
					run(f'{met_root}/bin/ascii2nc -format little_r {args.littler_root}/{obs_dir}/{obs_file} ob.nc -config {script_root}/../config/Ascii2NcConfig')
			elif args.prepbufr_root:
				# Case 3: GDAS PREPBUFR observations; link the pre-converted
				# netCDF file if present, otherwise convert the .nr file.
				prepbufr_file = f'{args.prepbufr_root}/gdas.{end_time.format("YYYYMMDD")}/prepbufr.gdas.{end_time.format("YYYYMMDDHH")}.nc'
				if os.path.isfile(prepbufr_file):
					run(f'ln -sf {prepbufr_file} ob.nc')
				else:
					run(f'{met_root}/bin/pb2nc {prepbufr_file.replace(".nc", ".nr")} ob.nc {met_root}/share/met/config/PB2NCConfig_default')

			# BUG FIX: ('ob.nc') is a plain string, not a tuple (missing
			# comma); pass a list as at the other check_files() call sites.
			if not check_files(['ob.nc']):
				cli.error('Failed to prepare netCDF observation file!')
	run(f'ls -l {met_work_dir}/ob.nc')

	cli.stage('Prepare configuration file.')

	if has_key(config['custom'], ['met', 'doms']):
		doms = config['custom']['met']['doms']
	else:
		doms = ['d01']
	for dom in doms:
		json_file_path = f'{dom}_{start_time.format("YYYYMMDDHHmmSS")}_f{fcst_hours:03}.json'
		expected_files = [json_file_path]
		if not check_files(expected_files) or args.force:
			run(f'cp -f {script_root}/../config/PointStatConfig .')
			run(f'{met_root}/bin/point_stat {upp_work_dir}/WRFPRS.{dom}.GrbF{fcst_hours:02} ob.nc PointStatConfig')
			write_json(start_time, fcst_hours, met_work_dir, json_file_path)
		else:
			run(f'ls -l {json_file_path}')

	cli.notice('Succeeded.')
	os.chdir(old_dir)
