{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [],
   "source": [
    "%matplotlib inline\n",
    "import xarray as xr\n",
    "import os\n",
    "import numpy as np\n",
    "import matplotlib.pyplot as plt\n",
    "import collections\n",
    "import warnings \n",
    "from netCDF4 import default_fillvals\n",
    "import configparser\n",
    "import pandas as pd \n",
    "\n",
    "# import soil classification functions\n",
    "from parameter_functions import (is_soil_class, is_param_value, classify_soil_texture)\n",
    "\n",
    "# import veg functions\n",
    "from parameter_functions import (calculate_cv_pft, calculate_nveg_pfts, \n",
    "                                 map_pft_to_nldas_class, is_overstory, \n",
    "                                 calc_root_fract, calc_root_depth_rz1, calc_root_depth_rz2)\n",
    "# import soil layer aggregation functions \n",
    "from parameter_functions import (calculate_first_layer_harmonic_mean, calculate_second_layer_harmonic_mean, \n",
    "                                 calculate_third_layer_harmonic_mean, calculate_first_layer_arithmetic_mean, \n",
    "                                 calculate_second_layer_arithmetic_mean, calculate_third_layer_arithmetic_mean,\n",
    "                                 soil_class_values, calculate_init_moist, calculate_baseflow_parameters,\n",
    "                                 create_empty_arrays, create_parameter_dataset)\n",
    "\n",
    "# define fillvals\n",
    "fillval_f = default_fillvals['f8']\n",
    "fillval_i = default_fillvals['i4']"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "This is a notebook for deriving new VIC 5 parameters in the RASM domain at multiple resolutions. \n",
    "\n",
     "Currently 50km (`wr50a_ar9v`) and 25km (`wr25b_ar9v`) are supported. \n",
    "\n",
    "The accompanying excel sheet `deriving_new_parameters_v2.xlsx` and Word Doc `procedure_for_derivation.docx` provide additional citations and details on methods. "
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "__Load configuration file for deriving parameters__"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "['/p/home/gergel/scripts/rasm-vic5-parameters/regridding/regridding.cfg']"
      ]
     },
     "execution_count": 2,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "cwd = os.getcwd()\n",
    "config = configparser.ConfigParser()\n",
    "config.read(os.path.join(cwd, 'regridding', 'regridding.cfg'))"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Set domain file"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {},
   "outputs": [],
   "source": [
    "domain = xr.open_dataset(os.path.join(config['Parameter Specs']['domain_file_dir'], \n",
    "                                      config['Parameter Specs']['domain_file']))\n",
    "\n",
    "masknan_vals = domain['mask'].where(domain['mask'] == 1).values"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "__Set resolution for filenames__"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "calculating parameters at 25km\n"
     ]
    }
   ],
   "source": [
    "res = config['Parameter Specs']['res']\n",
    "grid = config['Parameter Specs']['grid']\n",
    "\n",
    "nj = len(domain.nj)\n",
    "ni = len(domain.ni)\n",
    "num_gridcells = nj * ni\n",
    "    \n",
    "num_veg = 17\n",
    "print(\"calculating parameters at %s\" %res)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "__Load 50km and global parameters for some derivations__"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {},
   "outputs": [],
   "source": [
    "old_params = xr.open_dataset(os.path.join(config['Other']['dir'], \n",
    "                                          config['Other']['old_param_filename']))"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "__Options__"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {},
   "outputs": [],
   "source": [
    "organic_fract = config.getboolean('Options', 'organic_fract')\n",
    "max_snow_albedo = config.getboolean('Options', 'max_snow_albedo')\n",
    "bulk_density_comb = config.getboolean('Options', 'bulk_density_comb')"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "__Load soil data__"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {},
   "outputs": [],
   "source": [
    "soil_data_vars = collections.OrderedDict()\n",
    "soil_data_vars['silt'] = 'silt_sl*'\n",
    "soil_data_vars['sand'] = 'sand_sl*'\n",
    "soil_data_vars['clay'] = 'clay_sl*'\n",
    "soil_data_vars['bulk_density'] = 'bulk_density_sl*'\n",
    "soil_data_vars['organic_fract'] = 'organic_fract_sl*'\n",
    "\n",
    "# soil data dict with nlayer = 7 (base resolution of data)\n",
    "soil_data = {}\n",
    "soil_data_dir = config['Parameter Specs']['output_dir']\n",
    "for soil_var, soil_wildcard in soil_data_vars.items(): \n",
    "    soil_data[soil_var] = xr.open_mfdataset(os.path.join(soil_data_dir, soil_wildcard),\n",
    "                                            concat_dim='nlayer', \n",
    "                                            data_vars='all', \n",
    "                                            coords='all')"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "__Calculate soil types based on percent clay, percent sand and bulk density__\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "metadata": {},
   "outputs": [],
   "source": [
    "# classify_soil_texture(sand, clay, silt)\n",
    "soil_type_array = xr.apply_ufunc(classify_soil_texture, \n",
    "                                 soil_data['sand']['sand'].where(domain.mask == 1), \n",
    "                                 soil_data['clay']['clay'].where(domain.mask == 1),  \n",
    "                                 soil_data['silt']['silt'].where(domain.mask == 1),\n",
    "                                 dask='allowed',\n",
    "                                 vectorize=True)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "metadata": {},
   "outputs": [],
   "source": [
    "ksat = xr.apply_ufunc(soil_class_values, \n",
    "                      soil_type_array,\n",
    "                      'ksat',\n",
    "                      dask='allowed',\n",
    "                      vectorize=True)\n",
    "quartz = xr.apply_ufunc(soil_class_values, \n",
    "                        soil_type_array,\n",
    "                        'quartz',\n",
    "                        dask='allowed',\n",
    "                        vectorize=True)\n",
    "Wcr_FRACT = xr.apply_ufunc(soil_class_values, \n",
    "                           soil_type_array,\n",
    "                           'Wcr_FRACT',\n",
    "                           dask='allowed',\n",
    "                           vectorize=True)\n",
    "Wpwp_FRACT = xr.apply_ufunc(soil_class_values, \n",
    "                            soil_type_array,\n",
    "                           'Wpwp_FRACT',\n",
    "                            dask='allowed',\n",
    "                            vectorize=True)\n",
    "b = xr.apply_ufunc(soil_class_values, \n",
    "                   soil_type_array,\n",
    "                   'b',\n",
    "                   dask='allowed',\n",
    "                   vectorize=True)\n",
    "bulk_density_min = xr.apply_ufunc(soil_class_values, \n",
    "                   soil_type_array,\n",
    "                   'bulk_density',\n",
    "                   dask='allowed',\n",
    "                   vectorize=True)\n",
    "resid_moist = xr.apply_ufunc(soil_class_values, \n",
    "                             soil_type_array,\n",
    "                             'resid_moist',\n",
    "                             dask='allowed',\n",
    "                             vectorize=True)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "__Load regridded GTOPO 30 data, data var for elevation is called `Band1`, in (m)__"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "metadata": {},
   "outputs": [],
   "source": [
    "gtopo_filename, gtopo_fileext = os.path.splitext(config['GTOPO']['filename'])\n",
    "gtopo = xr.open_dataset(os.path.join(config['Parameter Specs']['output_dir'], \n",
    "                                     '%s_%s.nc' %(gtopo_filename, grid)))\n",
    "elev = gtopo['Band1']"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "__Load regridded WORLDCLIM climate data for annual t and p__"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "metadata": {},
   "outputs": [],
   "source": [
    "worldclim_direc = config['Parameter Specs']['output_dir']\n",
    "prec = xr.open_mfdataset(os.path.join(worldclim_direc, 'prec*'),\n",
    "                                      concat_dim='time', \n",
    "                                      data_vars=['prec'], \n",
    "                                      coords='all')\n",
    "\n",
    "# aggregate to annual, need average annual precip\n",
    "annual_precip = prec['prec'].sum('time')\n",
    "\n",
    "temp = xr.open_mfdataset(os.path.join(worldclim_direc, 'tavg*'),\n",
    "                                      concat_dim='time', \n",
    "                                      data_vars='all', \n",
    "                                      coords='all')\n",
    "tavg = temp['tavg'].mean('time')"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "__Load CLM PFTs to use for vegetation parameters__"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 12,
   "metadata": {},
   "outputs": [],
   "source": [
    "pfts_filename, pfts_fileext = os.path.splitext(config['PFTs']['filename'])\n",
    "veg_data = xr.open_dataset(os.path.join(config['Parameter Specs']['output_dir'], \n",
    "                                        '%s_%s.nc' %(pfts_filename, grid)))"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "__Calculate Cv from PFTs__"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "metadata": {},
   "outputs": [],
   "source": [
    "cv = xr.apply_ufunc(calculate_cv_pft, \n",
    "                    veg_data['PCT_PFT'].where(domain.mask == 1),\n",
    "                    dask='allowed',\n",
    "                    vectorize=True)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "__Calculate number of active PFTs, `Nveg`__ "
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 14,
   "metadata": {},
   "outputs": [],
   "source": [
    "Nveg = xr.apply_ufunc(calculate_nveg_pfts,\n",
    "                      veg_data['PCT_PFT'].where(domain.mask == 1),\n",
    "                      dask='allowed',\n",
    "                      input_core_dims=[['pft']],\n",
    "                      vectorize=True)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "__Load LAI and vegetation height, `MONTHLY_LAI` and `MONTHLY_HEIGHT_TOP`__"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 15,
   "metadata": {},
   "outputs": [],
   "source": [
    "lai_filename, lai_fileext = os.path.splitext(config['Vegetation']['lai_filename'])\n",
    "lai_file = xr.open_dataset(os.path.join(config['Parameter Specs']['output_dir'], \n",
    "                                   '%s_%s_%s.nc' %(lai_filename, grid, \"lai\")))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 16,
   "metadata": {},
   "outputs": [],
   "source": [
    "veg_height_filename, veg_height_fileext = os.path.splitext(config['Vegetation']['veg_height_filename'])\n",
    "veg_height_file = xr.open_dataset(os.path.join(config['Parameter Specs']['output_dir'], \n",
    "                                '%s_%s_%s.nc' %(veg_height_filename, grid, \"veg_height\")))"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "LAI and veg_height from CLM and `PCT_PFT` from CLM have a different number of PFTs (`PCT_PFT` has one more PFT, 17 vs 16). The extra PFT in `PCT_PFT` has `PCT_PFT` = 0 over the entire RASM domain, so I just slice the LAI and veg_height from the 0th PFT (water/bare soil) and concatenate it for the 16th PFT. "
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 17,
   "metadata": {},
   "outputs": [],
   "source": [
    "lai_slice = lai_file['MONTHLY_LAI'].isel(pft = 0)\n",
    "vegheight_slice = veg_height_file['MONTHLY_HEIGHT_TOP'].isel(pft=0)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 18,
   "metadata": {},
   "outputs": [],
   "source": [
    "lai = xr.concat([lai_file['MONTHLY_LAI'], lai_slice], dim='pft')\n",
    "veg_height = xr.concat([veg_height_file['MONTHLY_HEIGHT_TOP'], vegheight_slice], dim='pft')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 19,
   "metadata": {},
   "outputs": [],
   "source": [
    "veg_rough = 0.123 * veg_height\n",
    "displacement = 0.67 * veg_height\n",
    "\n",
    "displacement.values[displacement.values == 0] = 1.0"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "__Change dims and order of dims of LAI array__"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 20,
   "metadata": {},
   "outputs": [],
   "source": [
    "lai = lai.rename({'time': 'month', 'pft': 'veg_class'})\n",
    "lai = lai.transpose('veg_class', 'month', 'nj', 'ni')\n",
    "\n",
    "veg_rough = veg_rough.rename({'time': 'month', 'pft': 'veg_class'})\n",
    "veg_rough = veg_rough.transpose('veg_class', 'month', 'nj', 'ni')\n",
    "\n",
    "displacement = displacement.rename({'time': 'month', 'pft': 'veg_class'})\n",
    "displacement = displacement.transpose('veg_class', 'month', 'nj', 'ni')"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "__Note__: map albedo, root zone fraction and root zone depth based on vegetation type. see `deriving_new_parameters_v2.xlsx` sheet titled `PFT-NLDAS Mapping` for mapping between NLDAS vegetation classes (used in old VIC 5 parameters) and CLM PFTs. This mapping is based on obvious relationships and some approximations (used for PFTs 8-11)."
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "__Create Dataset for variables and define data_vars__"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 21,
   "metadata": {},
   "outputs": [],
   "source": [
    "arr_months, arr_nlayer, \\\n",
    "arr_rootzone, arr_veg_classes, \\\n",
    "arr_veg_classes_rootzone, arr_veg_classes_month = create_empty_arrays(domain, nj, ni, num_veg)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 22,
   "metadata": {},
   "outputs": [],
   "source": [
    "# create DataSet\n",
    "params = create_parameter_dataset(domain, old_params, nj, ni, num_veg, organic_fract, max_snow_albedo, \n",
    "                                  bulk_density_comb)\n",
    "\n",
    "# fill in values\n",
    "params['Cv'].values = cv.values\n",
    "params['Nveg'].values = Nveg.values\n",
    "params['LAI'].values = lai.values.reshape(num_veg, 12, nj, ni)\n",
    "params['displacement'].values = displacement.values.reshape(num_veg, 12, nj, ni)\n",
    "params['veg_rough'].values = veg_rough.values.reshape(num_veg, 12, nj, ni)\n",
    "params['elev'].values = elev.values\n",
    "params['avg_T'].values = tavg.values\n",
    "params['annual_prec'].values = annual_precip.values\n",
    "\n",
    "roughness = np.copy(masknan_vals)\n",
    "roughness[np.nonzero(masknan_vals)] = 0.001\n",
    "params['rough'].values = roughness"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "define `trunk_ratio`, `rarc`, `rmin`, `wind_h`, `RGL`, `rad_atten`, `wind_atten`, `overstory`, `max_snow_albedo`"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 23,
   "metadata": {},
   "outputs": [],
   "source": [
    "# trunk ratio, rarc, rad_atten\n",
    "trunk_ratio = np.copy(arr_veg_classes)\n",
    "params['trunk_ratio'].values = trunk_ratio * 0.2\n",
    "# adjust for bare soil \n",
    "params['trunk_ratio'].values[0, :, :] = 0.0\n",
    "\n",
    "rarc = np.copy(arr_veg_classes)\n",
    "params['rarc'].values = rarc * 60\n",
    "# adjust for bare soil\n",
    "params['rarc'].values[0, :, :] = 100\n",
    "\n",
    "rad_atten = np.copy(arr_veg_classes)\n",
    "params['rad_atten'].values = rad_atten * 0.5\n",
    "# adjust for bare soil \n",
    "params['rad_atten'].values[0, :, :] = 0.0\n",
    "\n",
    "wind_atten = np.copy(arr_veg_classes)\n",
    "params['wind_atten'].values = wind_atten * 0.5\n",
    "# adjust for bare soil \n",
    "params['wind_atten'].values[0, :, :] = 0.0"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 24,
   "metadata": {},
   "outputs": [],
   "source": [
    "if max_snow_albedo == True:\n",
    "    # max_albedo\n",
    "    for pft in veg_data.pft.values:\n",
    "        # get nldas mapping from pft\n",
    "        nldas = map_pft_to_nldas_class(pft)\n",
    "        if nldas == 0:\n",
    "            max_alb = 0.34\n",
    "        elif nldas == 1:\n",
    "            max_alb = 0.37\n",
    "        elif nldas == 2:\n",
    "            max_alb = 0.35\n",
    "        elif nldas == 3: \n",
    "            max_alb = 0.35\n",
    "        elif nldas == 4: \n",
    "            max_alb = 0.44\n",
    "        elif nldas == 5:\n",
    "            max_alb = 0.69\n",
    "        elif nldas == 6:\n",
    "            max_alb = 0.43\n",
    "        elif nldas == 7:\n",
    "            max_alb = 0.56\n",
    "        elif nldas == 8:\n",
    "            max_alb = 0.70\n",
    "        elif nldas == 9:\n",
    "            max_alb = 0.65\n",
    "        elif nldas == 10:\n",
    "            max_alb = 0.46\n",
    "        elif nldas == 11:\n",
    "            max_alb = 0.84\n",
    "        params['max_snow_albedo'].values[pft, :, :] = np.ones((1, nj, ni)) * max_alb"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 25,
   "metadata": {},
   "outputs": [],
   "source": [
    "# rmin, wind_h\n",
    "for pft in veg_data.pft.values:\n",
    "    # get nldas mapping from pft\n",
    "    nldas = map_pft_to_nldas_class(pft)\n",
    "    if nldas >= 0 and nldas <= 3:\n",
    "        rmin = np.asscalar(old_params.rmin.isel(veg_class=0).mean())\n",
    "        wind_h = np.asscalar(old_params.wind_h.isel(veg_class=0).mean())\n",
    "    elif nldas == 4:\n",
    "        rmin = np.asscalar(old_params.rmin.isel(veg_class=4).mean())\n",
    "        wind_h = np.asscalar(old_params.wind_h.isel(veg_class=4).mean())\n",
    "    elif nldas >= 5 and nldas <= 6:\n",
    "        rmin = np.asscalar(old_params.rmin.isel(veg_class=5).mean())\n",
    "        wind_h = np.asscalar(old_params.wind_h.isel(veg_class=5).mean())\n",
    "    elif nldas >= 7 and nldas <= 8:\n",
    "        rmin = np.asscalar(old_params.rmin.isel(veg_class=7).mean())\n",
    "        wind_h = np.asscalar(old_params.wind_h.isel(veg_class=7).mean())\n",
    "    elif nldas == 9:\n",
    "        rmin = np.asscalar(old_params.rmin.isel(veg_class=9).mean())\n",
    "        wind_h = np.asscalar(old_params.wind_h.isel(veg_class=9).mean())\n",
    "    elif nldas == 10:\n",
    "        rmin = np.asscalar(old_params.rmin.isel(veg_class=10).mean())\n",
    "        wind_h = np.asscalar(old_params.wind_h.isel(veg_class=10).mean())\n",
    "    elif nldas == 11:\n",
    "        rmin = np.asscalar(old_params.rmin.isel(veg_class=11).mean())\n",
    "        wind_h = np.asscalar(old_params.wind_h.isel(veg_class=11).mean())\n",
    "    params['rmin'].values[pft, :, :] = np.ones((1, nj, ni)) * rmin\n",
    "    params['wind_h'].values[pft, :, :] = np.ones((1, nj, ni)) * wind_h"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 26,
   "metadata": {},
   "outputs": [],
   "source": [
    "# RGL\n",
    "for pft in veg_data.pft.values:\n",
    "    # get nldas mapping from pft\n",
    "    nldas = map_pft_to_nldas_class(pft)\n",
    "    if nldas >= 0 and nldas <= 3:\n",
    "        rgl = np.asscalar(old_params.wind_h.isel(veg_class=0).mean())\n",
    "    elif nldas >= 4 and nldas <= 5:\n",
    "        rgl = np.asscalar(old_params.wind_h.isel(veg_class=4).mean())\n",
    "    elif nldas >= 6 and nldas <= 8:\n",
    "        rgl = np.asscalar(old_params.wind_h.isel(veg_class=6).mean())\n",
    "    elif nldas >= 9 and nldas <= 10:\n",
    "        rgl = np.asscalar(old_params.wind_h.isel(veg_class=9).mean())\n",
    "    elif nldas == 11:\n",
    "        rgl = np.asscalar(old_params.wind_h.isel(veg_class=11).mean())\n",
    "    params['RGL'].values[pft, :, :] = np.ones((1, nj, ni)) * rgl"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 27,
   "metadata": {},
   "outputs": [],
   "source": [
    "# overstory\n",
    "overstory = np.copy(arr_veg_classes)\n",
    "for pft in veg_data.pft.values:\n",
    "    nldas = map_pft_to_nldas_class(pft)\n",
    "    if nldas > 6:\n",
    "        # no overstory\n",
    "        overstory = 0.0\n",
    "    else: \n",
    "        overstory = 1.0\n",
    "    params['overstory'].values[pft, :, :] = overstory"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 28,
   "metadata": {},
   "outputs": [],
   "source": [
    "root_depth_rz1 = xr.apply_ufunc(calc_root_depth_rz1,\n",
    "                           params['Cv'].where(domain.mask == 1), \n",
    "                           dask='allowed',\n",
    "                           vectorize=True)\n",
    "root_depth_rz2 = xr.apply_ufunc(calc_root_depth_rz2,\n",
    "                           params['Cv'].where(domain.mask == 1), \n",
    "                           dask='allowed',\n",
    "                           vectorize=True)\n",
    "root_depth = xr.concat([root_depth_rz1, root_depth_rz2],\n",
    "                      dim='root_zone').transpose('veg_class', 'root_zone', 'nj', 'ni')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 29,
   "metadata": {},
   "outputs": [],
   "source": [
    "# root fract \n",
    "\n",
    "rz = 0\n",
    "for pft in veg_data.pft.values:\n",
    "    if pft == 0:\n",
    "        root_fract_rz1 = xr.apply_ufunc(calc_root_fract,\n",
    "                                        params['Cv'].isel(veg_class=pft),\n",
    "                                        str(pft),\n",
    "                                        str(rz),\n",
    "                                        dask='allowed',\n",
    "                                        vectorize=True)\n",
    "    else: \n",
    "        root_fract_rz1 = xr.concat([root_fract_rz1, xr.apply_ufunc(calc_root_fract,\n",
    "                                                                   params['Cv'].isel(veg_class=pft),\n",
    "                                                                   str(pft),\n",
    "                                                                   str(rz),\n",
    "                                                                   dask='allowed',\n",
    "                                                                   vectorize=True)],\n",
    "                                  dim='veg_class')\n",
    "rz = 1\n",
    "for pft in veg_data.pft.values:\n",
    "    if pft == 0:\n",
    "        root_fract_rz2 = xr.apply_ufunc(calc_root_fract,\n",
    "                                        params['Cv'].isel(veg_class=pft), \n",
    "                                        str(pft),\n",
    "                                        str(rz),\n",
    "                                        dask='allowed',\n",
    "                                        vectorize=True)\n",
    "    else: \n",
    "        root_fract_rz2 = xr.concat([root_fract_rz2, xr.apply_ufunc(calc_root_fract,\n",
    "                                                                   params['Cv'].isel(veg_class=pft), \n",
    "                                                                   str(pft),\n",
    "                                                                   str(rz),\n",
    "                                                                   dask='allowed',\n",
    "                                                                   vectorize=True)],\n",
    "                                  dim='veg_class')\n",
    "        \n",
    "root_fract = xr.concat([root_fract_rz1, root_fract_rz2], dim='root_zone').transpose('veg_class', 'root_zone', 'nj', 'ni')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 30,
   "metadata": {},
   "outputs": [],
   "source": [
    "params['root_depth'].values = root_depth\n",
    "params['root_fract'].values = root_fract"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "__albedo__"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 31,
   "metadata": {},
   "outputs": [],
   "source": [
    "# loop over pft classes and months \n",
    "for pft in veg_data.pft.values:\n",
    "    for month in old_params.month.values:\n",
    "        nldas = map_pft_to_nldas_class(pft)\n",
    "        if nldas == 0 or nldas == 1:\n",
    "            albedo = 0.12\n",
    "        elif nldas >= 2 and nldas <= 5:\n",
    "            albedo = 0.18\n",
    "        elif nldas >= 6 and nldas <= 8:\n",
    "            albedo = 0.19\n",
    "        elif nldas == 9:\n",
    "            albedo = 0.2\n",
    "        elif nldas == 10: \n",
    "            albedo = 0.12\n",
    "        elif nldas == 11: \n",
    "            albedo = 0.2\n",
    "        params['albedo'].values[pft, month-1, :, :] = np.ones((1, 1, nj, ni)) * albedo"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "__Load hydroclimate classes__"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 32,
   "metadata": {},
   "outputs": [],
   "source": [
    "hydro_classes = xr.open_dataset(os.path.join(config['Parameter Specs']['output_dir'],\n",
    "                                             'hydroclimate_masks_%s.nc' %grid))"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "__Calculate baseflow parameters: Ds, Dsmax, Ws__"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 33,
   "metadata": {},
   "outputs": [],
   "source": [
    "soil_direc = config['Soil Data']['ascii_dir']\n",
    "soil_filename = config['Soil Data']['ascii_filename']\n",
    "d1 = calculate_baseflow_parameters(domain, soil_direc, soil_filename, hydro_classes, \"d1\")\n",
    "params['Ds'].values = d1\n",
    "\n",
    "d2 = calculate_baseflow_parameters(domain, soil_direc, soil_filename, hydro_classes, \"d2\")\n",
    "params['Dsmax'].values = d2\n",
    "\n",
    "d3 = calculate_baseflow_parameters(domain, soil_direc, soil_filename, hydro_classes, \"d3\")\n",
    "params['Ws'].values = d3\n",
    "\n",
    "d4 = calculate_baseflow_parameters(domain, soil_direc, soil_filename, hydro_classes, \"d4\")\n",
    "params['c'].values = d4"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "__b_i (`infilt`)__"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 34,
   "metadata": {},
   "outputs": [],
   "source": [
    "bi = np.copy(masknan_vals)\n",
    "bi[np.nonzero(hydro_classes['arid'].values)] = 0.05\n",
    "bi[np.nonzero(hydro_classes['temperate_dry'].values)] = 0.05\n",
    "bi[np.nonzero(hydro_classes['cold_dry_perma'].values)] = 0.3\n",
    "bi[np.nonzero(hydro_classes['cold_dry_noperma'].values)] = 0.5\n",
    "bi[np.nonzero(hydro_classes['cold_wds_ws_perma'].values)] = 0.3\n",
    "bi[np.nonzero(hydro_classes['cold_wds_ws_noperma'].values)] = 0.25\n",
    "bi[np.nonzero(hydro_classes['cold_wds_cs_perma'].values)] = 0.3\n",
    "bi[np.nonzero(hydro_classes['cold_wds_cs_noperma'].values)] = 0.25\n",
    "bi[np.nonzero(hydro_classes['polar'].values)] = 0.35\n",
    "\n",
    "params['infilt'].values = bi"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "__soil depths (`depth`)__"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 35,
   "metadata": {},
   "outputs": [],
   "source": [
    "D1 = np.copy(masknan_vals)\n",
    "D2 = np.copy(masknan_vals)\n",
    "D3 = np.copy(masknan_vals)\n",
    "D1[np.nonzero(domain.mask.values)] = 0.3\n",
    "D3[np.nonzero(domain.mask.values)] = 0.5\n",
    "\n",
    "D2[np.nonzero(hydro_classes['arid'].values)] = 2.0\n",
    "D2[np.nonzero(hydro_classes['temperate_dry'].values)] = 2.0\n",
    "D2[np.nonzero(hydro_classes['cold_dry_perma'].values)] = 0.5\n",
    "D2[np.nonzero(hydro_classes['cold_dry_noperma'].values)] = 0.5\n",
    "D2[np.nonzero(hydro_classes['cold_wds_ws_perma'].values)] = 2.0\n",
    "D2[np.nonzero(hydro_classes['cold_wds_ws_noperma'].values)] = 0.5\n",
    "D2[np.nonzero(hydro_classes['cold_wds_cs_perma'].values)] = 1.1\n",
    "D2[np.nonzero(hydro_classes['cold_wds_cs_noperma'].values)] = 0.3\n",
    "D2[np.nonzero(hydro_classes['polar'].values)] = 0.3\n",
    "\n",
    "\n",
    "depths = np.rollaxis(np.dstack((D1, D2, D3)), axis=2)\n",
    "params['depth'].values = depths"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "__Aggregate ISRIC soil data to VIC soil depths__"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "first need array of soil depths "
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 36,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "max soil depth is 2.8 m\n"
     ]
    }
   ],
   "source": [
    "soil_depths = params['depth'].sum(axis=0)\n",
    "print(\"max soil depth is %.1f m\" % soil_depths.max())"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 37,
   "metadata": {},
   "outputs": [],
   "source": [
    "ksat_l1 = xr.apply_ufunc(calculate_first_layer_harmonic_mean,\n",
    "                         ksat.isel(nlayer=0), \n",
    "                         ksat.isel(nlayer=1),\n",
    "                         dask='allowed',\n",
    "                         vectorize=True)\n",
    "\n",
    "ksat_l2 = xr.apply_ufunc(calculate_second_layer_harmonic_mean,\n",
    "                         ksat.isel(nlayer=2), \n",
    "                         ksat.isel(nlayer=3),\n",
    "                         ksat.isel(nlayer=4),\n",
    "                         ksat.isel(nlayer=5),\n",
    "                         ksat.isel(nlayer=6),\n",
    "                         soil_depths,\n",
    "                         dask='allowed',\n",
    "                         vectorize=True)\n",
    "\n",
    "ksat_l3 = xr.apply_ufunc(calculate_second_layer_harmonic_mean,\n",
    "                         ksat.isel(nlayer=2), \n",
    "                         ksat.isel(nlayer=3),\n",
    "                         ksat.isel(nlayer=4),\n",
    "                         ksat.isel(nlayer=5),\n",
    "                         ksat.isel(nlayer=6),\n",
    "                         soil_depths,\n",
    "                         dask='allowed',\n",
    "                         vectorize=True)\n",
    "\n",
    "ksat_vals = np.rollaxis(np.dstack((ksat_l1, ksat_l2, ksat_l3)), \n",
    "                        axis=2)\n",
    "\n",
    "params['Ksat'].values = ksat_vals"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 38,
   "metadata": {},
   "outputs": [],
   "source": [
    "# bulk_density\n",
    "bdm_l1 = xr.apply_ufunc(calculate_first_layer_arithmetic_mean,\n",
    "                         bulk_density_min.isel(nlayer=0), \n",
    "                         bulk_density_min.isel(nlayer=1),\n",
    "                         dask='allowed',\n",
    "                         vectorize=True)\n",
    "\n",
    "bdm_l2 = xr.apply_ufunc(calculate_second_layer_arithmetic_mean,\n",
    "                         bulk_density_min.isel(nlayer=2), \n",
    "                         bulk_density_min.isel(nlayer=3),\n",
    "                         bulk_density_min.isel(nlayer=4),\n",
    "                         bulk_density_min.isel(nlayer=5),\n",
    "                         bulk_density_min.isel(nlayer=6),\n",
    "                         soil_depths,\n",
    "                         dask='allowed',\n",
    "                         vectorize=True)\n",
    "\n",
    "bdm_l3 = xr.apply_ufunc(calculate_third_layer_arithmetic_mean,\n",
    "                         bulk_density_min.isel(nlayer=2), \n",
    "                         bulk_density_min.isel(nlayer=3),\n",
    "                         bulk_density_min.isel(nlayer=4),\n",
    "                         bulk_density_min.isel(nlayer=5),\n",
    "                         bulk_density_min.isel(nlayer=6),\n",
    "                         soil_depths,\n",
    "                         dask='allowed',\n",
    "                         vectorize=True)\n",
    "\n",
    "bdm_vals = np.rollaxis(np.dstack((bdm_l1, bdm_l2, bdm_l3)), \n",
    "                        axis=2)\n",
    "params['bulk_density'].values = bdm_vals"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 39,
   "metadata": {},
   "outputs": [],
   "source": [
    "# expt\n",
    "b_l1 = xr.apply_ufunc(calculate_first_layer_arithmetic_mean,\n",
    "                         b.isel(nlayer=0), \n",
    "                         b.isel(nlayer=1),\n",
    "                         dask='allowed',\n",
    "                         vectorize=True)\n",
    "\n",
    "b_l2 = xr.apply_ufunc(calculate_second_layer_arithmetic_mean,\n",
    "                         b.isel(nlayer=2), \n",
    "                         b.isel(nlayer=3),\n",
    "                         b.isel(nlayer=4),\n",
    "                         b.isel(nlayer=5),\n",
    "                         b.isel(nlayer=6),\n",
    "                         soil_depths,\n",
    "                         dask='allowed',\n",
    "                         vectorize=True)\n",
    "\n",
    "b_l3 = xr.apply_ufunc(calculate_third_layer_arithmetic_mean,\n",
    "                         b.isel(nlayer=2), \n",
    "                         b.isel(nlayer=3),\n",
    "                         b.isel(nlayer=4),\n",
    "                         b.isel(nlayer=5),\n",
    "                         b.isel(nlayer=6),\n",
    "                         soil_depths,\n",
    "                         dask='allowed',\n",
    "                         vectorize=True)\n",
    "\n",
    "expt_vals = np.rollaxis(np.dstack(((b_l1 * 2) + 3, (b_l2 * 2) + 3, (b_l3 * 2) + 3)), \n",
    "                        axis=2)\n",
    "params['expt'].values = expt_vals\n",
    "params['bubble'].values = (np.copy(params['expt'].values) * 0.32) + 4.3"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 40,
   "metadata": {},
   "outputs": [],
   "source": [
    "# resid_moist\n",
    "rm_l1 = xr.apply_ufunc(calculate_first_layer_arithmetic_mean,\n",
    "                         resid_moist.isel(nlayer=0), \n",
    "                         resid_moist.isel(nlayer=1),\n",
    "                         dask='allowed',\n",
    "                         vectorize=True)\n",
    "\n",
    "rm_l2 = xr.apply_ufunc(calculate_second_layer_arithmetic_mean,\n",
    "                         resid_moist.isel(nlayer=2), \n",
    "                         resid_moist.isel(nlayer=3),\n",
    "                         resid_moist.isel(nlayer=4),\n",
    "                         resid_moist.isel(nlayer=5),\n",
    "                         resid_moist.isel(nlayer=6),\n",
    "                         soil_depths,\n",
    "                         dask='allowed',\n",
    "                         vectorize=True)\n",
    "\n",
    "rm_l3 = xr.apply_ufunc(calculate_third_layer_arithmetic_mean,\n",
    "                         resid_moist.isel(nlayer=2), \n",
    "                         resid_moist.isel(nlayer=3),\n",
    "                         resid_moist.isel(nlayer=4),\n",
    "                         resid_moist.isel(nlayer=5),\n",
    "                         resid_moist.isel(nlayer=6),\n",
    "                         soil_depths,\n",
    "                         dask='allowed',\n",
    "                         vectorize=True)\n",
    "rm_vals = np.rollaxis(np.dstack((rm_l1, rm_l2, rm_l3)), \n",
    "                        axis=2)\n",
    "params['resid_moist'].values = rm_vals"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 41,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Wcr_FRACT\n",
    "wcr_l1 = xr.apply_ufunc(calculate_first_layer_arithmetic_mean,\n",
    "                         Wcr_FRACT.isel(nlayer=0), \n",
    "                         Wcr_FRACT.isel(nlayer=1),\n",
    "                         dask='allowed',\n",
    "                         vectorize=True)\n",
    "\n",
    "wcr_l2 = xr.apply_ufunc(calculate_second_layer_arithmetic_mean,\n",
    "                         Wcr_FRACT.isel(nlayer=2), \n",
    "                         Wcr_FRACT.isel(nlayer=3),\n",
    "                         Wcr_FRACT.isel(nlayer=4),\n",
    "                         Wcr_FRACT.isel(nlayer=5),\n",
    "                         Wcr_FRACT.isel(nlayer=6),\n",
    "                         soil_depths,\n",
    "                         dask='allowed',\n",
    "                         vectorize=True)\n",
    "\n",
    "wcr_l3 = xr.apply_ufunc(calculate_third_layer_arithmetic_mean,\n",
    "                         Wcr_FRACT.isel(nlayer=2), \n",
    "                         Wcr_FRACT.isel(nlayer=3),\n",
    "                         Wcr_FRACT.isel(nlayer=4),\n",
    "                         Wcr_FRACT.isel(nlayer=5),\n",
    "                         Wcr_FRACT.isel(nlayer=6),\n",
    "                         soil_depths,\n",
    "                         dask='allowed',\n",
    "                         vectorize=True)\n",
    "wcr_vals = np.rollaxis(np.dstack((wcr_l1, wcr_l2, wcr_l3)), \n",
    "                        axis=2)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 42,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Wpwp_FRACT\n",
    "wpwp_l1 = xr.apply_ufunc(calculate_first_layer_arithmetic_mean,\n",
    "                         Wpwp_FRACT.isel(nlayer=0), \n",
    "                         Wpwp_FRACT.isel(nlayer=1),\n",
    "                         dask='allowed',\n",
    "                         vectorize=True)\n",
    "\n",
    "wpwp_l2 = xr.apply_ufunc(calculate_second_layer_arithmetic_mean,\n",
    "                         Wpwp_FRACT.isel(nlayer=2), \n",
    "                         Wpwp_FRACT.isel(nlayer=3),\n",
    "                         Wpwp_FRACT.isel(nlayer=4),\n",
    "                         Wpwp_FRACT.isel(nlayer=5),\n",
    "                         Wpwp_FRACT.isel(nlayer=6),\n",
    "                         soil_depths,\n",
    "                         dask='allowed',\n",
    "                         vectorize=True)\n",
    "\n",
    "wpwp_l3 = xr.apply_ufunc(calculate_third_layer_arithmetic_mean,\n",
    "                         Wpwp_FRACT.isel(nlayer=2), \n",
    "                         Wpwp_FRACT.isel(nlayer=3),\n",
    "                         Wpwp_FRACT.isel(nlayer=4),\n",
    "                         Wpwp_FRACT.isel(nlayer=5),\n",
    "                         Wpwp_FRACT.isel(nlayer=6),\n",
    "                         soil_depths,\n",
    "                         dask='allowed',\n",
    "                         vectorize=True)\n",
    "\n",
    "wpwp_vals = np.rollaxis(np.dstack((wpwp_l1, wpwp_l2, wpwp_l3)), \n",
    "                        axis=2)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 43,
   "metadata": {},
   "outputs": [],
   "source": [
    "# quartz\n",
    "qz_l1 = xr.apply_ufunc(calculate_first_layer_arithmetic_mean,\n",
    "                         quartz.isel(nlayer=0), \n",
    "                         quartz.isel(nlayer=1),\n",
    "                         dask='allowed',\n",
    "                         vectorize=True)\n",
    "\n",
    "qz_l2 = xr.apply_ufunc(calculate_second_layer_arithmetic_mean,\n",
    "                         quartz.isel(nlayer=2), \n",
    "                         quartz.isel(nlayer=3),\n",
    "                         quartz.isel(nlayer=4),\n",
    "                         quartz.isel(nlayer=5),\n",
    "                         quartz.isel(nlayer=6),\n",
    "                         soil_depths,\n",
    "                         dask='allowed',\n",
    "                         vectorize=True)\n",
    "\n",
    "qz_l3 = xr.apply_ufunc(calculate_third_layer_arithmetic_mean,\n",
    "                         quartz.isel(nlayer=2), \n",
    "                         quartz.isel(nlayer=3),\n",
    "                         quartz.isel(nlayer=4),\n",
    "                         quartz.isel(nlayer=5),\n",
    "                         quartz.isel(nlayer=6),\n",
    "                         soil_depths,\n",
    "                         dask='allowed',\n",
    "                         vectorize=True)\n",
    "qz_vals = np.rollaxis(np.dstack((qz_l1, qz_l2, qz_l3)), \n",
    "                        axis=2)\n",
    "params['quartz'].values = qz_vals"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 44,
   "metadata": {},
   "outputs": [],
   "source": [
    "if bulk_density_comb == True:\n",
    "    # bulk_density\n",
    "    bd_l1 = xr.apply_ufunc(calculate_first_layer_arithmetic_mean,\n",
    "                             soil_data['bulk_density']['bulk_density'].isel(nlayer=0), \n",
    "                             soil_data['bulk_density']['bulk_density'].isel(nlayer=1),\n",
    "                             dask='allowed',\n",
    "                             vectorize=True)\n",
    "\n",
    "    bd_l2 = xr.apply_ufunc(calculate_second_layer_arithmetic_mean,\n",
    "                             soil_data['bulk_density']['bulk_density'].isel(nlayer=2), \n",
    "                             soil_data['bulk_density']['bulk_density'].isel(nlayer=3),\n",
    "                             soil_data['bulk_density']['bulk_density'].isel(nlayer=4),\n",
    "                             soil_data['bulk_density']['bulk_density'].isel(nlayer=5),\n",
    "                             soil_data['bulk_density']['bulk_density'].isel(nlayer=6),\n",
    "                             soil_depths,\n",
    "                             dask='allowed',\n",
    "                             vectorize=True)\n",
    "\n",
    "    bd_l3 = xr.apply_ufunc(calculate_third_layer_arithmetic_mean,\n",
    "                             soil_data['bulk_density']['bulk_density'].isel(nlayer=2), \n",
    "                             soil_data['bulk_density']['bulk_density'].isel(nlayer=3),\n",
    "                             soil_data['bulk_density']['bulk_density'].isel(nlayer=4),\n",
    "                             soil_data['bulk_density']['bulk_density'].isel(nlayer=5),\n",
    "                             soil_data['bulk_density']['bulk_density'].isel(nlayer=6),\n",
    "                             soil_depths,\n",
    "                             dask='allowed',\n",
    "                             vectorize=True)\n",
    "    bd_vals = np.rollaxis(np.dstack((bd_l1, bd_l2, bd_l3)), \n",
    "                            axis=2)\n",
    "    params['bulk_density_comb'].values = bd_vals"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 45,
   "metadata": {},
   "outputs": [],
   "source": [
    "if organic_fract == True:\n",
    "    # organic fract\n",
    "    of_l1 = xr.apply_ufunc(calculate_first_layer_arithmetic_mean,\n",
    "                             soil_data['organic_fract']['organic_fract'].isel(nlayer=0), \n",
    "                             soil_data['organic_fract']['organic_fract'].isel(nlayer=1),\n",
    "                             dask='allowed',\n",
    "                             vectorize=True)\n",
    "\n",
    "    of_l2 = xr.apply_ufunc(calculate_second_layer_arithmetic_mean,\n",
    "                             soil_data['organic_fract']['organic_fract'].isel(nlayer=2), \n",
    "                             soil_data['organic_fract']['organic_fract'].isel(nlayer=3),\n",
    "                             soil_data['organic_fract']['organic_fract'].isel(nlayer=4),\n",
    "                             soil_data['organic_fract']['organic_fract'].isel(nlayer=5),\n",
    "                             soil_data['organic_fract']['organic_fract'].isel(nlayer=6),\n",
    "                             soil_depths,\n",
    "                             dask='allowed',\n",
    "                             vectorize=True)\n",
    "\n",
    "    of_l3 = xr.apply_ufunc(calculate_third_layer_arithmetic_mean,\n",
    "                             soil_data['organic_fract']['organic_fract'].isel(nlayer=2), \n",
    "                             soil_data['organic_fract']['organic_fract'].isel(nlayer=3),\n",
    "                             soil_data['organic_fract']['organic_fract'].isel(nlayer=4),\n",
    "                             soil_data['organic_fract']['organic_fract'].isel(nlayer=5),\n",
    "                             soil_data['organic_fract']['organic_fract'].isel(nlayer=6),\n",
    "                             soil_depths,\n",
    "                             dask='allowed',\n",
    "                             vectorize=True)\n",
    "    of_vals = np.rollaxis(np.dstack(((of_l1/1000), (of_l2/1000), (of_l3/1000))), \n",
    "                            axis=2)\n",
    "    params['organic'].values = of_vals"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "__Calculate porosity from bulk density and soil density__"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 46,
   "metadata": {},
   "outputs": [],
   "source": [
    "sd_l1 = np.copy(masknan_vals)\n",
    "sd_l2 = np.copy(masknan_vals)\n",
    "sd_l3 = np.copy(masknan_vals)\n",
    "\n",
    "sd_l1[np.nonzero(masknan_vals)] = 2685.0\n",
    "sd_l2[np.nonzero(masknan_vals)] = 2685.0\n",
    "sd_l3[np.nonzero(masknan_vals)] = 2685.0\n",
    "\n",
    "sd_vals = np.rollaxis(np.dstack((sd_l1, sd_l2, sd_l3)), \n",
    "                        axis=2)\n",
    "params['soil_density'].values = sd_vals"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 47,
   "metadata": {},
   "outputs": [],
   "source": [
    "if organic_fract == True:\n",
    "    sd_org_l1 = np.copy(masknan_vals)\n",
    "    sd_org_l2 = np.copy(masknan_vals)\n",
    "    sd_org_l3 = np.copy(masknan_vals)\n",
    "\n",
    "    sd_org_l1[np.nonzero(masknan_vals)] = 1300.0\n",
    "    sd_org_l2[np.nonzero(masknan_vals)] = 1300.0\n",
    "    sd_org_l3[np.nonzero(masknan_vals)] = 1300.0\n",
    "\n",
    "    sd_org_vals = np.rollaxis(np.dstack((sd_org_l1, sd_org_l2, sd_org_l3)), \n",
    "                            axis=2)\n",
    "    params['soil_density_org'].values = sd_org_vals"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 48,
   "metadata": {},
   "outputs": [],
   "source": [
    "# calculate porosity\n",
    "if bulk_density_comb == True:\n",
    "    porosity = 1 - (params['bulk_density_comb'] / params['soil_density'])\n",
    "else:\n",
    "    porosity = 1 - (params['bulk_density'] / params['soil_density'])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 49,
   "metadata": {},
   "outputs": [],
   "source": [
    "params['Wpwp_FRACT'].values = wpwp_vals / porosity.values\n",
    "params['Wcr_FRACT'].values = wcr_vals / porosity.values"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "__Make initial moisture fully saturated__"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 50,
   "metadata": {},
   "outputs": [],
   "source": [
    "init_moist_l1 = xr.apply_ufunc(calculate_init_moist,\n",
    "                               porosity.isel(nlayer=0), \n",
    "                               params.depth.isel(nlayer=0),\n",
    "                               dask='allowed', \n",
    "                               vectorize=True)\n",
    "init_moist_l2 = xr.apply_ufunc(calculate_init_moist,\n",
    "                               porosity.isel(nlayer=1), \n",
    "                               params.depth.isel(nlayer=1),\n",
    "                               dask='allowed', \n",
    "                               vectorize=True)\n",
    "init_moist_l3 = xr.apply_ufunc(calculate_init_moist,\n",
    "                               porosity.isel(nlayer=2), \n",
    "                               params.depth.isel(nlayer=2),\n",
    "                               dask='allowed', \n",
    "                               vectorize=True)\n",
    "init_moist_vals = np.rollaxis(np.dstack((init_moist_l1, init_moist_l2, init_moist_l3)), \n",
    "                        axis=2)\n",
    "params['init_moist'].values = init_moist_vals"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "__Add `off_gmt`__"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 51,
   "metadata": {},
   "outputs": [],
   "source": [
    "if res == \"50km\":\n",
    "    params['off_gmt'].values = old_params['off_gmt'].values\n",
    "\n",
    "else:\n",
    "    # load regridded off_gmt\n",
    "    gmt_filename, gmt_fileext = os.path.splitext(config['Other']['gmt_regrid_filename'])\n",
    "    off_gmt = xr.open_dataset(os.path.join(config['Parameter Specs']['output_dir'], \n",
    "                                           '%s_%s.nc' %(gmt_filename, grid)))\n",
    "    params['off_gmt'].values = off_gmt['off_gmt'].values"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 52,
   "metadata": {},
   "outputs": [],
   "source": [
    "phi_s = np.copy(arr_nlayer)\n",
    "phi_s[np.nonzero(arr_nlayer)] = old_params['phi_s'].mean().item()\n",
    "params['phi_s'].values = phi_s\n",
    "\n",
    "# use domain mask since frozen soils should be True for all gridcells\n",
    "params['fs_active'].values = domain['mask'].values\n",
    "\n",
    "dp = np.copy(masknan_vals)\n",
    "dp[np.nonzero(masknan_vals)] = old_params['dp'].mean().item()\n",
    "params['dp'].values = dp"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 53,
   "metadata": {},
   "outputs": [],
   "source": [
    "snow_rough = np.copy(masknan_vals)\n",
    "snow_rough[np.nonzero(masknan_vals)] = 0.0024\n",
    "params['snow_rough'].values = snow_rough"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "__Make gridcell number array__"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 54,
   "metadata": {},
   "outputs": [],
   "source": [
    "# add run_cell, mask, xv and yv, xc, yc, gridcell, lats, lons\n",
    "runcell = np.copy(masknan_vals)\n",
    "runcell[np.nonzero(masknan_vals)] = 1\n",
    "params['run_cell'].values = runcell\n",
    "params['mask'].values = domain['mask'].values\n",
    "\n",
    "if res == \"50km\":\n",
    "    gc_arr = old_params['gridcell'].values\n",
    "else: \n",
    "    gridcell_nums = np.linspace(1, nj*ni, nj*ni, endpoint=True, dtype='int32')\n",
    "    # gc_arr = np.fliplr(np.flipud(gridcell_nums.reshape(nj, ni)))\n",
    "    gc_arr = gridcell_nums.reshape(nj, ni)\n",
    "    \n",
    "params['gridcell'].values = gc_arr\n",
    "params['lats'].values = domain['yc'].where(domain.mask==1).values\n",
    "if res == \"50km\":\n",
    "    params['lons'].values = old_params['lons'].where(domain.mask==1).values\n",
    "else:\n",
    "    params['lons'].values = domain['xc'].where(domain.mask==1).values\n",
    "params['xc'].values = domain['xc'].values\n",
    "params['yc'].values = domain['yc'].values\n",
    "\n",
    "params['xv'].values = np.rollaxis(domain['xv'].values, axis=2)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "swap 0th and 16th veg class to accommodate bare soil. \n",
    "\n",
    "veg class vars: Cv, Nveg, trunk_ratio, rarc, rmin, wind_h, RGL, rad_atten, wind_atten, albedo, LAI, overstory, displacement, veg_rough, root_depth, root_fract"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "additional organic fract parameters if that option is set to True: \n",
    "\n",
    "soil particle density of OM \n",
    "\n",
    "bulk density of OM\n",
    "\n",
    "organic content of soil (fraction of total soil volume)\n",
    "\n",
    "Note: Organic matter (%) = Total organic carbon (%) x 1.72, from http://www.soilquality.org.au/factsheets/organic-carbon"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 55,
   "metadata": {},
   "outputs": [],
   "source": [
    "if max_snow_albedo == True:\n",
    "    veg_class_vars = ['Cv', 'trunk_ratio', 'rarc', 'rmin', 'wind_h', 'RGL', 'rad_atten',\n",
    "                  'wind_atten', 'albedo', 'LAI', 'overstory',\n",
    "                  'root_depth', 'root_fract', 'displacement', 'veg_rough', 'max_snow_albedo']\n",
    "else: \n",
    "    veg_class_vars = ['Cv', 'trunk_ratio', 'rarc', 'rmin', 'wind_h', 'RGL', 'rad_atten',\n",
    "                  'wind_atten', 'albedo', 'LAI', 'overstory',\n",
    "                  'root_depth', 'root_fract', 'displacement', 'veg_rough']\n",
    "\n",
    "for veg_class_var in veg_class_vars: \n",
    "    if ((veg_class_var == \"LAI\") or (veg_class_var == \"albedo\") or (veg_class_var == \"root_depth\") \n",
    "    or (veg_class_var == \"root_fract\") or (veg_class_var == \"veg_rough\") or \n",
    "        (veg_class_var == \"displacement\")):\n",
    "        bare = np.copy(params[veg_class_var].isel(veg_class=0))\n",
    "        last = np.copy(params[veg_class_var].isel(veg_class=16))\n",
    "        params[veg_class_var].values[0, :, :, :] = last\n",
    "        params[veg_class_var].values[16, :, :, :] = bare\n",
    "    else:\n",
    "        bare = np.copy(params[veg_class_var].isel(veg_class=0))\n",
    "        last = np.copy(params[veg_class_var].isel(veg_class=16))\n",
    "        params[veg_class_var].values[0, :, :] = last\n",
    "        params[veg_class_var].values[16, :, :] = bare\n",
    "params['root_fract'].values[16, :, :, :] = 0\n",
    "params['root_depth'].values[16, :, :, :] = 0\n",
    "params['displacement'].values[16, :, :, :] = 0\n",
    "params['veg_rough'].values[16, :, :, :] = 0\n",
    "params['overstory'].values[16, :, :] = 0"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 56,
   "metadata": {},
   "outputs": [],
   "source": [
    "# adjust data vars that need adjusting \n",
    "params['run_cell'].values = domain['mask'].where(domain.mask == 1)\n",
    "params['gridcell'].values = params['gridcell'].where(domain.mask == 1)\n",
    "params['Ksat'].values = params['Ksat'].where(domain.mask == 1)\n",
    "params['expt'].values = params['expt'].where(domain.mask == 1)\n",
    "params['bubble'].values = params['bubble'].where(domain.mask == 1)\n",
    "params['Wpwp_FRACT'].values = params['Wpwp_FRACT'].where(domain.mask == 1)\n",
    "params['Wcr_FRACT'].values = params['Wcr_FRACT'].where(domain.mask == 1)\n",
    "params['resid_moist'].values = params['resid_moist'].where(domain.mask == 1)\n",
    "params['quartz'].values = params['quartz'].where(domain.mask == 1)\n",
    "if bulk_density_comb == True:\n",
    "    params['bulk_density_comb'].values = params['bulk_density_comb'].where(domain.mask == 1)\n",
    "params['bulk_density'].values = params['bulk_density'].where(domain.mask == 1)\n",
    "params['soil_density'].values = params['soil_density'].where(domain.mask == 1)\n",
    "params['c'].values = params['c'].where(domain.mask == 1)\n",
    "params['dp'].values = params['dp'].where(domain.mask == 1)\n",
    "params['snow_rough'].values = params['snow_rough'].where(domain.mask == 1)\n",
    "params['Nveg'].values = params['Nveg'].where(domain.mask == 1)\n",
    "params['trunk_ratio'] = params['trunk_ratio'].where(domain.mask == 1)\n",
    "params['rarc'] = params['rarc'].where(domain.mask == 1)\n",
    "params['phi_s'] = params['phi_s'].where(domain.mask == 1)\n",
    "params['rmin'] = params['rmin'].where(domain.mask == 1)\n",
    "params['wind_h'] = params['wind_h'].where(domain.mask == 1)\n",
    "params['RGL'] = params['RGL'].where(domain.mask == 1)\n",
    "params['rad_atten'] = params['rad_atten'].where(domain.mask == 1)\n",
    "params['wind_atten'] = params['wind_atten'].where(domain.mask == 1)\n",
    "if max_snow_albedo == True:\n",
    "    params['max_snow_albedo'] = params['max_snow_albedo'].where(domain.mask == 1)\n",
    "params['root_depth'] = params['root_depth'].where(domain.mask == 1)\n",
    "params['root_fract'] = params['root_fract'].where(domain.mask == 1)\n",
    "params['albedo'] = params['albedo'].where(domain.mask == 1)\n",
    "params['LAI'] = params['LAI'].where(domain.mask == 1)\n",
    "params['overstory'] = params['overstory'].where(domain.mask == 1)\n",
    "params['displacement'] = params['displacement'].where(domain.mask == 1)\n",
    "params['veg_rough'] = params['veg_rough'].where(domain.mask == 1)\n",
    "params['elev'] = params['elev'].where(domain.mask == 1)\n",
    "params['avg_T'] = params['avg_T'].where(domain.mask == 1)\n",
    "params['annual_prec'] = params['annual_prec'].where(domain.mask == 1)\n",
    "params['rough'] = params['rough'].where(domain.mask == 1)\n",
    "if organic_fract == True:\n",
    "    params['organic'] = params['organic'].where(domain.mask == 1)\n",
    "    params['soil_density_org'] = params['soil_density_org'].where(domain.mask == 1)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 57,
   "metadata": {},
   "outputs": [],
   "source": [
    "max_moist = params['depth'] * porosity * 1000\n",
    "Wcr = params['Wcr_FRACT'] * max_moist\n",
    "Wpwp = params['Wpwp_FRACT'] * max_moist\n",
    "resid_moist_mm = params['resid_moist'] * params['depth'] * 1000"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "__Some quick tests to ensure that the parameters don't make VIC crash:__"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 58,
   "metadata": {},
   "outputs": [],
   "source": [
    "if (Wcr.where(Wpwp > Wcr).sum()) > 0:\n",
    "    raise AssertionError(\"wilting point moisture is greater than critical point moisture\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 59,
   "metadata": {},
   "outputs": [],
   "source": [
    "if (params['resid_moist'].where(Wpwp < params['resid_moist']).sum()) > 0:\n",
    "    raise AssertionError(\"wilting point moisture is less than residual moisture\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 60,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Wpwp_FRACT MUST be >= resid_moist / (1.0 - bulk_density/soil_density).\n",
    "if params['Wpwp_FRACT'].where(params['Wpwp_FRACT'] < params['resid_moist'] / porosity).sum() > 0:\n",
    "    raise AssertionError(\"Wpwp_FRACT must be >= resid_moist / (1.0 - bulk_density/soil_density)\")"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "__Save new parameters to NetCDF, location specified by `outdir` in config file__"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 61,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Organic Fract is True, including soil_dens_org and organic_fract in parameters file\n",
      "Max Snow Albedo is True, including veg-dep snow albedo in parameters file\n",
      "Bulk Density Comb is True, including combined bulk density in parameters file\n",
      "saved new parameters to /p/home/gergel/data/parameters/25km/new_vic5_params_wr25b_ar9v4_all_options.nc\n"
     ]
    }
   ],
   "source": [
    "encoding_params = {'run_cell': {'dtype': 'int32', \"_FillValue\": fillval_i}, \n",
    "                   'gridcell': {'dtype': 'int32', \"_FillValue\": fillval_i}, \n",
    "                   'fs_active': {'dtype': 'int32', \"_FillValue\": fillval_i}, \n",
    "                   'Nveg': {'dtype': 'int32', \"_FillValue\": fillval_i},\n",
    "                   'overstory': {'dtype': 'int32', \"_FillValue\": fillval_i},\n",
    "                   'veg_class': {'dtype': 'int32'}}\n",
    "\n",
    "direc = config['Parameter Specs']['output_dir']\n",
    "\n",
    "if organic_fract == True and bulk_density_comb == True and max_snow_albedo == True:\n",
    "    filename = 'new_vic5_params_%s_%s.nc' %(grid, \"all_options\")\n",
    "elif organic_fract == False and bulk_density_comb == True and max_snow_albedo == False:\n",
    "    filename = 'new_vic5_params_%s_%s.nc' %(grid, \"bulk_density\")\n",
    "else: \n",
    "    filename = 'new_vic5_params_%s_%s.nc' %(grid, \"no_options\")\n",
    "    \n",
    "new_params_file = os.path.join(direc, filename)\n",
    "params.to_netcdf(new_params_file, format='NETCDF4_CLASSIC', encoding=encoding_params)\n",
    "\n",
    "if organic_fract:\n",
    "    print(\"Organic Fract is True, including soil_dens_org and organic_fract in parameters file\")\n",
    "if max_snow_albedo:\n",
    "    print(\"Max Snow Albedo is True, including veg-dep snow albedo in parameters file\")\n",
    "if bulk_density_comb:\n",
    "    print(\"Bulk Density Comb is True, including combined bulk density in parameters file\")\n",
    "    \n",
    "print(\"saved new parameters to %s\" %new_params_file)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.6.7"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
