#!/usr/bin/env python3

import argparse
from glob import glob
import os
import pendulum
import f90nml
from netCDF4 import Dataset
import re
from shutil import copyfile
import sys
sys.path.append(f'{os.path.dirname(os.path.realpath(__file__))}/../utils')
from utils import cli, check_files, search_files, run, submit_job, parse_config

def run_real(work_root, wrf_root, config, args, wps_work_dir=None, tag=None):
	"""Run WRF's real.exe to produce wrfinput_d* (and wrfbdy_d01) from WPS met_em files.

	Parameters:
		work_root: Root working directory; the wrf subdirectory is created under it.
		wrf_root: WRF installation root containing run/real.exe.
		config: Parsed configuration with 'custom' (start_time/end_time as pendulum
			datetimes) and 'domains' (max_dom) sections.
		args: Parsed CLI arguments; args.force forces a rerun, args.np is the
			process count passed to submit_job.
		wps_work_dir: Directory holding met_em.*.nc files; defaults to {work_root}/wps.
		tag: Optional subdirectory tag; the work dir becomes {work_root}/{tag}/wrf.

	Side effects: creates/changes into the WRF work directory (cwd restored on
	exit), symlinks met_em files, rewrites ./namelist.input, and submits real.exe
	as a batch job. Errors are reported through cli.error.
	"""
	start_time = config['custom']['start_time']
	end_time = config['custom']['end_time']
	datetime_fmt = 'YYYY-MM-DD_HH:mm:ss'
	start_time_str = start_time.format(datetime_fmt)
	max_dom = config['domains']['max_dom']

	# Identity comparison with None per PEP 8 (was `tag != None`).
	if tag is not None:
		wrf_work_dir = f'{work_root}/{tag}/wrf'
	else:
		wrf_work_dir = f'{work_root}/wrf'
	if not os.path.isdir(wrf_work_dir): os.mkdir(wrf_work_dir)
	old_dir = os.getcwd()
	os.chdir(wrf_work_dir)

	if wps_work_dir is None:
		wps_work_dir = f'{work_root}/wps'
	if not os.path.isdir(wps_work_dir): cli.error(f'WPS work directory {wps_work_dir} does not exist!')

	cli.stage(f'Run real.exe at {wrf_work_dir} ...')
	# real.exe is considered done when every per-domain wrfinput (and, for a
	# non-zero forecast window, wrfbdy_d01) already exists.
	expected_files = ['wrfinput_d{:02d}_{}'.format(i + 1, start_time_str) for i in range(max_dom)]
	if start_time != end_time: expected_files.append('wrfbdy_d01')
	if not check_files(expected_files) or args.force:
		run('rm -f wrfinput_* met_em.*.nc')
		run(f'ln -sf {wps_work_dir}/met_em.*.nc .')
		try:
			# IndexError: the glob matched nothing (no met_em files were linked);
			# OSError: netCDF4 could not open the file (was a bare `except:`).
			dataset = Dataset(glob('met_em.*.nc')[0])
		except (IndexError, OSError):
			cli.error('Failed to open one of met_em.*.nc file!')
		# Check met_em file.
		if 'num_st_layers' not in dataset.dimensions or dataset.dimensions['num_st_layers'].size == 0:
			cli.error('Failed to run ungrib and metgrid due to num_metgrid_soil_levels is zero!')
		namelist_input = f90nml.read('./namelist.input')
		namelist_input['domains']['num_metgrid_levels'] = dataset.dimensions['num_metgrid_levels'].size
		namelist_input['physics']['num_land_cat'] = dataset.getncattr('NUM_LAND_CAT')
		# NOTE(review): if cli.error above terminates execution, the else branch
		# below is unreachable; kept for safety in case cli.error only logs.
		if 'num_st_layers' in dataset.dimensions:
			namelist_input['domains']['num_metgrid_soil_levels'] = dataset.dimensions['num_st_layers'].size
		else:
			cli.warning(f'Dimension num_st_layers is not in {dataset.filepath()}! Set num_metgrid_soil_levels to 0.')
			namelist_input['domains']['num_metgrid_soil_levels'] = 0
		dataset.close()
		namelist_input.write('./namelist.input', force=True)
		submit_job(f'{wrf_root}/run/real.exe', args.np, config, args, wait=True, exclusive=True)
		for i in range(max_dom):
			if not os.path.isfile('wrfinput_d{0:02d}'.format(i + 1)):
				# Check if the failure is caused by parallel computing?
				# cli.warning('Failed to run real.exe in parallel. Try to run in serial.')
				# submit_job(f'{wrf_root}/run/real.exe', 1, config, args, wait=True)
				if not os.path.isfile('wrfinput_d{0:02d}'.format(i + 1)):
					# BUG FIX: the original mixed an f-string with str.format, so the
					# f-string rendered '{0:02d}' eagerly and the message always said
					# "wrfinput_d00". Format the actual domain number here.
					cli.error(f'Still failed to generate wrfinput_d{i + 1:02d}! See {wrf_work_dir}/rsl.error.0000.')
			run('ln -sf wrfinput_d{0:02d} wrfinput_d{0:02d}_{1}'.format(i + 1, start_time_str))
		if os.path.isfile('wrfbdy_d01'):
			run(f'ln -sf wrfbdy_d01 wrfbdy_d01_{start_time_str}')
		cli.notice('Succeeded.')
	else:
		run('ls -l wrfinput_* wrfbdy_*')
		cli.notice('File wrfinput_* already exist.')
	os.chdir(old_dir)
