<!DOCTYPE html>

<html xmlns="http://www.w3.org/1999/xhtml">

<head>

<meta charset="utf-8" />
<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
<meta name="generator" content="pandoc" />

<meta name="viewport" content="width=device-width, initial-scale=1">

<meta name="author" content="Arezoo Rafieeinasab" />

<meta name="date" content="2017-05-01" />

<title>Precipitation Evaluation</title>



<style type="text/css">code{white-space: pre;}</style>
<style type="text/css">
table.sourceCode, tr.sourceCode, td.lineNumbers, td.sourceCode {
  margin: 0; padding: 0; vertical-align: baseline; border: none; }
table.sourceCode { width: 100%; line-height: 100%; }
td.lineNumbers { text-align: right; padding-right: 4px; padding-left: 4px; color: #aaaaaa; border-right: 1px solid #aaaaaa; }
td.sourceCode { padding-left: 5px; }
code > span.kw { color: #007020; font-weight: bold; }
code > span.dt { color: #902000; }
code > span.dv { color: #40a070; }
code > span.bn { color: #40a070; }
code > span.fl { color: #40a070; }
code > span.ch { color: #4070a0; }
code > span.st { color: #4070a0; }
code > span.co { color: #60a0b0; font-style: italic; }
code > span.ot { color: #007020; }
code > span.al { color: #ff0000; font-weight: bold; }
code > span.fu { color: #06287e; }
code > span.er { color: #ff0000; font-weight: bold; }
</style>



<link href="data:text/css,body%20%7B%0A%20%20background%2Dcolor%3A%20%23fff%3B%0A%20%20margin%3A%201em%20auto%3B%0A%20%20max%2Dwidth%3A%20700px%3B%0A%20%20overflow%3A%20visible%3B%0A%20%20padding%2Dleft%3A%202em%3B%0A%20%20padding%2Dright%3A%202em%3B%0A%20%20font%2Dfamily%3A%20%22Open%20Sans%22%2C%20%22Helvetica%20Neue%22%2C%20Helvetica%2C%20Arial%2C%20sans%2Dserif%3B%0A%20%20font%2Dsize%3A%2014px%3B%0A%20%20line%2Dheight%3A%201%2E35%3B%0A%7D%0A%0A%23header%20%7B%0A%20%20text%2Dalign%3A%20center%3B%0A%7D%0A%0A%23TOC%20%7B%0A%20%20clear%3A%20both%3B%0A%20%20margin%3A%200%200%2010px%2010px%3B%0A%20%20padding%3A%204px%3B%0A%20%20width%3A%20400px%3B%0A%20%20border%3A%201px%20solid%20%23CCCCCC%3B%0A%20%20border%2Dradius%3A%205px%3B%0A%0A%20%20background%2Dcolor%3A%20%23f6f6f6%3B%0A%20%20font%2Dsize%3A%2013px%3B%0A%20%20line%2Dheight%3A%201%2E3%3B%0A%7D%0A%20%20%23TOC%20%2Etoctitle%20%7B%0A%20%20%20%20font%2Dweight%3A%20bold%3B%0A%20%20%20%20font%2Dsize%3A%2015px%3B%0A%20%20%20%20margin%2Dleft%3A%205px%3B%0A%20%20%7D%0A%0A%20%20%23TOC%20ul%20%7B%0A%20%20%20%20padding%2Dleft%3A%2040px%3B%0A%20%20%20%20margin%2Dleft%3A%20%2D1%2E5em%3B%0A%20%20%20%20margin%2Dtop%3A%205px%3B%0A%20%20%20%20margin%2Dbottom%3A%205px%3B%0A%20%20%7D%0A%20%20%23TOC%20ul%20ul%20%7B%0A%20%20%20%20margin%2Dleft%3A%20%2D2em%3B%0A%20%20%7D%0A%20%20%23TOC%20li%20%7B%0A%20%20%20%20line%2Dheight%3A%2016px%3B%0A%20%20%7D%0A%0Atable%20%7B%0A%20%20margin%3A%201em%20auto%3B%0A%20%20border%2Dwidth%3A%201px%3B%0A%20%20border%2Dcolor%3A%20%23DDDDDD%3B%0A%20%20border%2Dstyle%3A%20outset%3B%0A%20%20border%2Dcollapse%3A%20collapse%3B%0A%7D%0Atable%20th%20%7B%0A%20%20border%2Dwidth%3A%202px%3B%0A%20%20padding%3A%205px%3B%0A%20%20border%2Dstyle%3A%20inset%3B%0A%7D%0Atable%20td%20%7B%0A%20%20border%2Dwidth%3A%201px%3B%0A%20%20border%2Dstyle%3A%20inset%3B%0A%20%20line%2Dheight%3A%2018px%3B%0A%20%20padding%3A%205px%205px%3B%0A%7D%0Atable%2C%20table%20th%2C%20table%20td%20%7B%0A%20%20border%2Dleft%2Dstyle%3A%20none%3B%0A%
20%20border%2Dright%2Dstyle%3A%20none%3B%0A%7D%0Atable%20thead%2C%20table%20tr%2Eeven%20%7B%0A%20%20background%2Dcolor%3A%20%23f7f7f7%3B%0A%7D%0A%0Ap%20%7B%0A%20%20margin%3A%200%2E5em%200%3B%0A%7D%0A%0Ablockquote%20%7B%0A%20%20background%2Dcolor%3A%20%23f6f6f6%3B%0A%20%20padding%3A%200%2E25em%200%2E75em%3B%0A%7D%0A%0Ahr%20%7B%0A%20%20border%2Dstyle%3A%20solid%3B%0A%20%20border%3A%20none%3B%0A%20%20border%2Dtop%3A%201px%20solid%20%23777%3B%0A%20%20margin%3A%2028px%200%3B%0A%7D%0A%0Adl%20%7B%0A%20%20margin%2Dleft%3A%200%3B%0A%7D%0A%20%20dl%20dd%20%7B%0A%20%20%20%20margin%2Dbottom%3A%2013px%3B%0A%20%20%20%20margin%2Dleft%3A%2013px%3B%0A%20%20%7D%0A%20%20dl%20dt%20%7B%0A%20%20%20%20font%2Dweight%3A%20bold%3B%0A%20%20%7D%0A%0Aul%20%7B%0A%20%20margin%2Dtop%3A%200%3B%0A%7D%0A%20%20ul%20li%20%7B%0A%20%20%20%20list%2Dstyle%3A%20circle%20outside%3B%0A%20%20%7D%0A%20%20ul%20ul%20%7B%0A%20%20%20%20margin%2Dbottom%3A%200%3B%0A%20%20%7D%0A%0Apre%2C%20code%20%7B%0A%20%20background%2Dcolor%3A%20%23f7f7f7%3B%0A%20%20border%2Dradius%3A%203px%3B%0A%20%20color%3A%20%23333%3B%0A%20%20white%2Dspace%3A%20pre%2Dwrap%3B%20%20%20%20%2F%2A%20Wrap%20long%20lines%20%2A%2F%0A%7D%0Apre%20%7B%0A%20%20border%2Dradius%3A%203px%3B%0A%20%20margin%3A%205px%200px%2010px%200px%3B%0A%20%20padding%3A%2010px%3B%0A%7D%0Apre%3Anot%28%5Bclass%5D%29%20%7B%0A%20%20background%2Dcolor%3A%20%23f7f7f7%3B%0A%7D%0A%0Acode%20%7B%0A%20%20font%2Dfamily%3A%20Consolas%2C%20Monaco%2C%20%27Courier%20New%27%2C%20monospace%3B%0A%20%20font%2Dsize%3A%2085%25%3B%0A%7D%0Ap%20%3E%20code%2C%20li%20%3E%20code%20%7B%0A%20%20padding%3A%202px%200px%3B%0A%7D%0A%0Adiv%2Efigure%20%7B%0A%20%20text%2Dalign%3A%20center%3B%0A%7D%0Aimg%20%7B%0A%20%20background%2Dcolor%3A%20%23FFFFFF%3B%0A%20%20padding%3A%202px%3B%0A%20%20border%3A%201px%20solid%20%23DDDDDD%3B%0A%20%20border%2Dradius%3A%203px%3B%0A%20%20border%3A%201px%20solid%20%23CCCCCC%3B%0A%20%20margin%3A%200%205px%3B%0A%7D%0A%0Ah1%20%7B%0A%20%20margin%2Dtop%3A%200%3B%0A%20%20font%2Dsize%3A%
2035px%3B%0A%20%20line%2Dheight%3A%2040px%3B%0A%7D%0A%0Ah2%20%7B%0A%20%20border%2Dbottom%3A%204px%20solid%20%23f7f7f7%3B%0A%20%20padding%2Dtop%3A%2010px%3B%0A%20%20padding%2Dbottom%3A%202px%3B%0A%20%20font%2Dsize%3A%20145%25%3B%0A%7D%0A%0Ah3%20%7B%0A%20%20border%2Dbottom%3A%202px%20solid%20%23f7f7f7%3B%0A%20%20padding%2Dtop%3A%2010px%3B%0A%20%20font%2Dsize%3A%20120%25%3B%0A%7D%0A%0Ah4%20%7B%0A%20%20border%2Dbottom%3A%201px%20solid%20%23f7f7f7%3B%0A%20%20margin%2Dleft%3A%208px%3B%0A%20%20font%2Dsize%3A%20105%25%3B%0A%7D%0A%0Ah5%2C%20h6%20%7B%0A%20%20border%2Dbottom%3A%201px%20solid%20%23ccc%3B%0A%20%20font%2Dsize%3A%20105%25%3B%0A%7D%0A%0Aa%20%7B%0A%20%20color%3A%20%230033dd%3B%0A%20%20text%2Ddecoration%3A%20none%3B%0A%7D%0A%20%20a%3Ahover%20%7B%0A%20%20%20%20color%3A%20%236666ff%3B%20%7D%0A%20%20a%3Avisited%20%7B%0A%20%20%20%20color%3A%20%23800080%3B%20%7D%0A%20%20a%3Avisited%3Ahover%20%7B%0A%20%20%20%20color%3A%20%23BB00BB%3B%20%7D%0A%20%20a%5Bhref%5E%3D%22http%3A%22%5D%20%7B%0A%20%20%20%20text%2Ddecoration%3A%20underline%3B%20%7D%0A%20%20a%5Bhref%5E%3D%22https%3A%22%5D%20%7B%0A%20%20%20%20text%2Ddecoration%3A%20underline%3B%20%7D%0A%0A%2F%2A%20Class%20described%20in%20https%3A%2F%2Fbenjeffrey%2Ecom%2Fposts%2Fpandoc%2Dsyntax%2Dhighlighting%2Dcss%0A%20%20%20Colours%20from%20https%3A%2F%2Fgist%2Egithub%2Ecom%2Frobsimmons%2F1172277%20%2A%2F%0A%0Acode%20%3E%20span%2Ekw%20%7B%20color%3A%20%23555%3B%20font%2Dweight%3A%20bold%3B%20%7D%20%2F%2A%20Keyword%20%2A%2F%0Acode%20%3E%20span%2Edt%20%7B%20color%3A%20%23902000%3B%20%7D%20%2F%2A%20DataType%20%2A%2F%0Acode%20%3E%20span%2Edv%20%7B%20color%3A%20%2340a070%3B%20%7D%20%2F%2A%20DecVal%20%28decimal%20values%29%20%2A%2F%0Acode%20%3E%20span%2Ebn%20%7B%20color%3A%20%23d14%3B%20%7D%20%2F%2A%20BaseN%20%2A%2F%0Acode%20%3E%20span%2Efl%20%7B%20color%3A%20%23d14%3B%20%7D%20%2F%2A%20Float%20%2A%2F%0Acode%20%3E%20span%2Ech%20%7B%20color%3A%20%23d14%3B%20%7D%20%2F%2A%20Char%20%2A%2F%0Acode%20%3E%20span%2Est%20%7B%20color%3A%20%23d14%3B%2
0%7D%20%2F%2A%20String%20%2A%2F%0Acode%20%3E%20span%2Eco%20%7B%20color%3A%20%23888888%3B%20font%2Dstyle%3A%20italic%3B%20%7D%20%2F%2A%20Comment%20%2A%2F%0Acode%20%3E%20span%2Eot%20%7B%20color%3A%20%23007020%3B%20%7D%20%2F%2A%20OtherToken%20%2A%2F%0Acode%20%3E%20span%2Eal%20%7B%20color%3A%20%23ff0000%3B%20font%2Dweight%3A%20bold%3B%20%7D%20%2F%2A%20AlertToken%20%2A%2F%0Acode%20%3E%20span%2Efu%20%7B%20color%3A%20%23900%3B%20font%2Dweight%3A%20bold%3B%20%7D%20%2F%2A%20Function%20calls%20%2A%2F%20%0Acode%20%3E%20span%2Eer%20%7B%20color%3A%20%23a61717%3B%20background%2Dcolor%3A%20%23e3d2d2%3B%20%7D%20%2F%2A%20ErrorTok%20%2A%2F%0A%0A" rel="stylesheet" type="text/css" />

</head>

<body>




<h1 class="title toc-ignore">Precipitation Evaluation</h1>
<h4 class="author"><em>Arezoo Rafieeinasab</em></h4>
<h4 class="date"><em>2017-05-01</em></h4>



<div id="background" class="section level1">
<h1>Background</h1>
<p>Forcing could be stored in multiple files, either in input forcing files (such as <em>LDASIN</em> or <em>PRECIP_FORCING</em> files) or in the output files (<em>LDASOUT</em>). <em>LDASOUT</em> files may contain a variable called <em>ACCPRCP</em> storing the accumulated precipitation, and the rainfall depth can be obtained by subtracting two consecutive time steps. <em>LDASIN</em> and <em>PRECIP_FORCING</em> files usually store rain rate in <em>RAINRATE</em> and <em>precip_rate</em> variables. This vignette serves as a short explanation of how to retrieve data and perform some basic comparisons between two sets of data.</p>
<p>Load the rwrfhydro package.</p>
<pre class="sourceCode r"><code class="sourceCode r"><span class="kw">library</span>(rwrfhydro)</code></pre>
</div>
<div id="import-observed-datasets" class="section level1">
<h1>Import observed datasets</h1>
<p>Functions to retrieve observation data for several observational networks are provided in rwrfhydro. GHCN-Daily and USCRN networks are introduced and used in this vignette.</p>
</div>
<div id="uscrn" class="section level1">
<h1>USCRN</h1>
<p>The U.S. Climate Reference Network (USCRN) is a network of monitoring stations equipped with research quality instruments. Besides precipitation, these gauges report temperature, soil moisture and soil temperature. The precipitation is measured every 5 minutes using three independent measurements in a weighing bucket gauge accompanied with a disdrometer reporting the presence or absence of precipitation. These gauges, in cold climates, are equipped with heating tape around the throat of the weighing gauge to prevent the frozen precipitation from accumulating on the interior walls and capping the gauge. The redundancy in the measurements is to ensure the quality of the measurements.</p>
<p>Data is provided in 4 different temporal resolutions (subhourly, hourly, daily and monthly), and depending on the temporal resolution, the variables provided change. For more information on the data and how to retrieve it, refer to the man page of <code>Get_USCRN</code>.</p>
</div>
<div id="ghcn-daily" class="section level1">
<h1>GHCN-daily</h1>
<p>The Global Historical Climatology Network-Daily (GHCN-D) dataset contains daily data from around 80000 surface stations in the world, about two-thirds of which are precipitation only (Menne et al. 2012). It is the most complete collection of U.S. daily data available (Menne et al. 2012). The dataset undergoes an automated quality assurance, the details of which can be found in Durre et al. 2008; 2010. Data is available on <a href="http://www1.ncdc.noaa.gov/pub/data/ghcn/daily" class="uri">http://www1.ncdc.noaa.gov/pub/data/ghcn/daily</a> and is updated frequently. Data is available in two formats, either categorized by gauge station or categorized by year. Accordingly, there are two functions to pull GHCN-daily data from these two sources, called <code>GetGhcn</code> and <code>GetGhcn2</code>.</p>
<div id="gauge-selection" class="section level2">
<h2>Gauge selection</h2>
<p>The first step is to select the gauges you want to use for verification based on some criteria. GHCN-daily contains the precipitation data from different sources such as COOP or CoCoRaHS. The selection criteria can be country code, states if country is US, type of rain gauge network (for example CoCoRaHS), or a rectangle domain.</p>
<pre class="sourceCode r"><code class="sourceCode r"><span class="co">#setInternet2(use=FALSE) # If using windows, you may need this.</span>

<span class="co"># Return all the gauges within US from observation network of COOP (C) and CoCoRaHS (1)</span>
countryCodeList &lt;-<span class="st"> </span><span class="kw">c</span>(<span class="st">&quot;US&quot;</span>)
networkCodeList &lt;-<span class="st"> </span><span class="kw">c</span>(<span class="st">&quot;1&quot;</span>,<span class="st">&quot;C&quot;</span>)
sg &lt;-<span class="st"> </span><span class="kw">SelectGhcnGauges</span>(<span class="dt">countryCode=</span>countryCodeList,
                       <span class="dt">networkCode=</span>networkCodeList)
<span class="kw">str</span>(sg)</code></pre>
<p>The sg dataframe has all the information provided by NCDC about each gauge. For the rest of this vignette we will use only the domain of Fourmile Creek which is the case study provided. We use the rectangle domain containing Fourmile Creek, as the boundary to collect all the gauges information.</p>
<pre class="sourceCode r"><code class="sourceCode r">sg &lt;-<span class="st"> </span><span class="kw">SelectGhcnGauges</span>(<span class="dt">domain =</span> <span class="ot">TRUE</span>, <span class="dt">minLat =</span> <span class="fl">40.0125</span>, <span class="dt">maxLat =</span> <span class="fl">40.0682</span>, 
                       <span class="dt">minLon =</span> -<span class="fl">105.562</span>, <span class="dt">maxLon=</span>-<span class="fl">105.323</span>)
<span class="kw">str</span>(sg)</code></pre>
<pre><code>## 'data.frame':    11 obs. of  12 variables:
##  $ country    : chr  &quot;US&quot; &quot;US&quot; &quot;US&quot; &quot;US&quot; ...
##  $ network    : chr  &quot;1&quot; &quot;1&quot; &quot;1&quot; &quot;1&quot; ...
##  $ stationID  : chr  &quot;COBO0063&quot; &quot;COBO0159&quot; &quot;COBO0280&quot; &quot;COBO0297&quot; ...
##  $ latitude   : num  40 40 40 40 40.1 ...
##  $ longitude  : num  -106 -105 -105 -105 -105 ...
##  $ elevation  : num  3026 2105 2319 2335 2032 ...
##  $ state      : chr  &quot;CO&quot; &quot;CO&quot; &quot;CO&quot; &quot;CO&quot; ...
##  $ name       : chr  &quot;NEDERLAND 5.2 NNW             &quot; &quot;BOULDER 3.6 WNW               &quot; &quot;BOULDER 7.0 W                 &quot; &quot;BOULDER 7.4 W                 &quot; ...
##  $ GSNflag    : chr  &quot;   &quot; &quot;   &quot; &quot;   &quot; &quot;   &quot; ...
##  $ HCN/CRNflag: chr  &quot;   &quot; &quot;   &quot; &quot;   &quot; &quot;   &quot; ...
##  $ WMOID      : chr  &quot;    &quot; &quot;    &quot; &quot;    &quot; &quot;    &quot; ...
##  $ siteIds    : chr  &quot;US1COBO0063&quot; &quot;US1COBO0159&quot; &quot;US1COBO0280&quot; &quot;US1COBO0297&quot; ...</code></pre>
</div>
<div id="getghcn" class="section level2">
<h2>GetGhcn</h2>
<p>GHCN-daily data are archived for each individual gauge in a text file in <a href="http://www1.ncdc.noaa.gov/pub/data/ghcn/daily/all/" class="uri">http://www1.ncdc.noaa.gov/pub/data/ghcn/daily/all/</a>. Precipitation can be downloaded for a single site or multiple ones by setting element to “PRCP” and specifying the desired start and end date. Notice, precipitation values are converted from tenths of mm to mm.</p>
<pre class="sourceCode r"><code class="sourceCode r">startDate &lt;-<span class="st"> &quot;2013/01/01&quot;</span>
endDate &lt;-<span class="st"> &quot;2013/09/30&quot;</span>
element &lt;-<span class="st"> &quot;PRCP&quot;</span>
obsPrcp &lt;-<span class="st"> </span><span class="kw">GetGhcn</span>(sg$siteIds, element, startDate, endDate, <span class="dt">parallel =</span> <span class="ot">FALSE</span>)
<span class="kw">str</span>(obsPrcp)</code></pre>
<pre><code>## 'data.frame':    1273 obs. of  5 variables:
##  $ siteIds  : chr  &quot;US1COBO0159&quot; &quot;US1COBO0159&quot; &quot;US1COBO0159&quot; &quot;US1COBO0159&quot; ...
##  $ Date     : Date, format: &quot;2013-01-01&quot; &quot;2013-02-01&quot; ...
##  $ dailyGhcn: num  0 -1000 -1000 -1000 18 ...
##  $ qFlag    : chr  NA NA NA NA ...
##  $ element  : chr  &quot;PRCP&quot; &quot;PRCP&quot; &quot;PRCP&quot; &quot;PRCP&quot; ...</code></pre>
</div>
<div id="getghcn2" class="section level2">
<h2>GetGhcn2</h2>
<p>NCDC also provides GHCN-daily categorized by year under <a href="http://www1.ncdc.noaa.gov/pub/data/ghcn/daily/by_year/" class="uri">http://www1.ncdc.noaa.gov/pub/data/ghcn/daily/by_year/</a>. If the number of gauges is high, <code>GetGhcn2</code> is much faster in retrieving data. It has the same arguments as <code>GetGhcn</code>.</p>
</div>
</div>
<div id="import-forcingprecipitation-data-used-in-wrf-hydro-model" class="section level1">
<h1>Import forcing/precipitation data used in WRF-Hydro model</h1>
<p>Forcing data used in WRF-Hydro modeling are usually stored in forcing files (such as LDASIN or PRECIP_FORCING files). Here we are going to use the data provided under “Fourmile_Creek” dataset.</p>
<p>Set a data path to the Fourmile Creek test case.</p>
<pre class="sourceCode r"><code class="sourceCode r">fcPath &lt;-<span class="st"> '~/wrfHydroTestCases/Fourmile_Creek_testcase_v2.0'</span></code></pre>
<p>First make a list of all the forcing files.</p>
<pre class="sourceCode r"><code class="sourceCode r">forcingPath &lt;-<span class="st"> </span><span class="kw">paste0</span>(fcPath,<span class="st">&quot;/FORCING&quot;</span>)
files &lt;-<span class="st"> </span><span class="kw">list.files</span>(<span class="dt">path =</span> forcingPath, <span class="dt">full.names =</span> <span class="ot">TRUE</span>, <span class="dt">pattern =</span> <span class="kw">glob2rx</span>(<span class="st">&quot;201304*LDASIN_DOMAIN1&quot;</span>))</code></pre>
<p>In order to be able to pull data from the netcdf files, one needs the location of the points in the geogrid domain file. However, only lat/lon locations of rain gauges are available if using the <code>SelectGhcnGauges</code> function. Therefore, it is required to map lat/lon information to x/y information in the geogrid in order to pull the data from the netcdf files. This can be done using the <code>GetGeogridIndex</code> function in rwrfhydro. One needs to provide the address of the geogrid file and the lat/lon info, and the <code>GetGeogridIndex</code> function returns a dataframe with two columns <code>sn</code> (south-north) and <code>we</code> (west-east).</p>
<pre class="sourceCode r"><code class="sourceCode r">geoFile &lt;-<span class="st"> </span><span class="kw">paste0</span>(fcPath,<span class="st">'/DOMAIN/geo_em_d01.Fourmile1km.nlcd11.nc'</span>)
rainGgaugeInds &lt;-<span class="st"> </span><span class="kw">GetGeogridIndex</span>(<span class="dt">xy =</span> <span class="kw">data.frame</span>(<span class="dt">lon=</span>sg$longitude, <span class="dt">lat=</span>sg$latitude),
                                  <span class="dt">ncfile =</span> geoFile)
sg &lt;-<span class="st"> </span><span class="kw">cbind</span>(sg,rainGgaugeInds)
<span class="kw">head</span>(sg)</code></pre>
<pre><code>##       country network stationID latitude longitude elevation state
## 49858      US       1  COBO0063  40.0359 -105.5442    3026.1    CO
## 49912      US       1  COBO0159  40.0395 -105.3275    2104.9    CO
## 49966      US       1  COBO0280  40.0295 -105.3842    2319.2    CO
## 49976      US       1  COBO0297  40.0305 -105.3918    2335.4    CO
## 49980      US       1  COBO0301  40.0507 -105.3741    2031.8    CO
## 49984      US       1  COBO0318  40.0394 -105.3612    1931.8    CO
##                                 name GSNflag HCN/CRNflag WMOID     siteIds
## 49858 NEDERLAND 5.2 NNW                                        US1COBO0063
## 49912 BOULDER 3.6 WNW                                          US1COBO0159
## 49966 BOULDER 7.0 W                                            US1COBO0280
## 49976 BOULDER 7.4 W                                            US1COBO0297
## 49980 BOULDER 6.7 WNW                                          US1COBO0301
## 49984 BOULDER 5.9 W                                            US1COBO0318
##       we sn
## 49858  2  4
## 49912 21  4
## 49966 16  3
## 49976 15  3
## 49980 17  5
## 49984 18  4</code></pre>
<p>Now we can pull data. One needs to prepare the file, var, and ind variables for the <code>GetMultiNcdf</code> function (refer to the Collect Output Data: GetMultiNcdf vignette). You can leave the stat as mean; since you are pulling data for single pixels, the mean returns the value of the pixel.</p>
<pre class="sourceCode r"><code class="sourceCode r">flList &lt;-<span class="st"> </span><span class="kw">list</span>(<span class="dt">forcing =</span> files)
varList &lt;-<span class="st"> </span><span class="kw">list</span>(<span class="dt">forcing =</span> <span class="kw">list</span>(<span class="dt">PRCP =</span> <span class="st">'RAINRATE'</span>))
prcpIndex &lt;-<span class="st"> </span><span class="kw">list</span>()
for (i in <span class="dv">1</span>:<span class="kw">length</span>(sg$siteIds)) {
  if (!<span class="kw">is.na</span>(sg$we[i]) &amp;<span class="st"> </span>!<span class="kw">is.na</span>(sg$sn[i])) {
    prcpIndex[[<span class="kw">as.character</span>(sg$siteIds[i])]] &lt;-<span class="st"> </span><span class="kw">list</span>(<span class="dt">start=</span><span class="kw">c</span>(sg$we[i], sg$sn[i],<span class="dv">1</span>),
                                                     <span class="dt">end=</span><span class="kw">c</span>(sg$we[i], sg$sn[i],<span class="dv">1</span>), <span class="dt">stat=</span><span class="st">&quot;mean&quot;</span>)
  }
}
indList &lt;-<span class="kw">list</span>(<span class="dt">forcing =</span> <span class="kw">list</span>(<span class="dt">PRCP =</span> prcpIndex))
prcpData &lt;-<span class="st"> </span><span class="kw">GetMultiNcdf</span>(<span class="dt">file =</span> flList, <span class="dt">var =</span> varList, <span class="dt">ind =</span> indList, <span class="dt">parallel=</span><span class="ot">FALSE</span>)
<span class="kw">head</span>(prcpData)</code></pre>
<pre><code>##      POSIXct          inds stat     statArg variable        value
## 1 2013-04-01   2:2,4:4,1:1 mean US1COBO0063 RAINRATE 0.000000e+00
## 2 2013-04-01 21:21,4:4,1:1 mean US1COBO0159 RAINRATE 5.000000e-07
## 3 2013-04-01 16:16,3:3,1:1 mean US1COBO0280 RAINRATE 0.000000e+00
## 4 2013-04-01 15:15,3:3,1:1 mean US1COBO0297 RAINRATE 0.000000e+00
## 5 2013-04-01 17:17,5:5,1:1 mean US1COBO0301 RAINRATE 4.371689e-07
## 6 2013-04-01 18:18,4:4,1:1 mean US1COBO0318 RAINRATE 5.000000e-07
##   variableGroup fileGroup
## 1          PRCP   forcing
## 2          PRCP   forcing
## 3          PRCP   forcing
## 4          PRCP   forcing
## 5          PRCP   forcing
## 6          PRCP   forcing</code></pre>
<p><code>GetMultiNcdf</code> pulls the time information from the netcdf files; if the data is not prepared properly, and the time info is not available, it will return the name of the file instead. In that case, time should be retrieved from the file name which is saved in the column <code>POSIXct</code>. Since the <code>obsPrcp</code> data are converted to mm, we also convert the rainrate to rain depth in an hour.</p>
<pre class="sourceCode r"><code class="sourceCode r">prcpData$value &lt;-<span class="st"> </span>prcpData$value*<span class="dv">3600</span></code></pre>
<div id="aggregating-hourly-data-into-daily." class="section level3">
<h3>Aggregating hourly data into daily.</h3>
<p>Each GHCN gauge has a unique reporting time, based on which the daily data is calculated. The reporting time is archived in the csv files and is retrieved when calling the <code>GetGhcn2</code> function (you will not get the reporting time using <code>GetGhcn</code>). We need to add the reporting time for each point, which would be the base for daily aggregation. If there is no <code>reportTime</code> among the <code>sg</code> columns, then it uses the default, which is 0700 AM.</p>
<pre class="sourceCode r"><code class="sourceCode r">if (<span class="st">&quot;reportTime&quot;</span> %in%<span class="st"> </span><span class="kw">names</span>(prcpData)) {
  sg$reportTime &lt;-<span class="st"> </span>obsPrcp$reportTime[<span class="kw">match</span>(sg$siteIds, obsPrcp$siteIds)]
  sg$reportTime[<span class="kw">which</span> (sg$reportTime==<span class="st">&quot;&quot;</span> |<span class="st"> </span><span class="kw">is.na</span>(sg$reportTime))] &lt;-<span class="dv">700</span>
}else{
  sg$reportTime&lt;-<span class="st"> </span><span class="dv">700</span>
}</code></pre>
<p>Call the <code>CalcDailyGhcn</code> function which takes the following steps:</p>
<ol style="list-style-type: decimal">
<li>It first searches for a column called <code>timeZone</code> in the <code>sg</code> (selected gauges) dataframe. If the time zone has not been provided, it will call <code>GetTimeZone(sg)</code>. For <code>GetTimeZone</code> to work, <code>sg</code> is required to have at least the two fields <code>latitude</code> and <code>longitude</code>.</li>
<li>Having the time zone for each gauge, the time offset will be obtained from the <code>tzLookup</code> data provided with rwrfhydro. Using the time offset, the UTC time of the precipitation will be converted to Local Standard Time (LST). This is the time convention in which GHCN-D data are reported.</li>
<li>The precipitation data will be aggregated based on the reporting time of each individual gauge. After the <code>dailyData</code> is returned, you can remove the days which do not have reports for all hours; the <code>numberOfDataPoints</code> column has the number of hours that observation was available within a day.</li>
</ol>
<pre class="sourceCode r"><code class="sourceCode r"><span class="kw">names</span>(prcpData)[<span class="kw">names</span>(prcpData) ==<span class="st"> 'value'</span>] &lt;-<span class="st"> 'DEL_ACCPRCP'</span>
dailyData &lt;-<span class="st"> </span><span class="kw">CalcDailyGhcn</span>(<span class="dt">sg =</span> sg,<span class="dt">prcp =</span> prcpData)
<span class="kw">head</span>(dailyData)</code></pre>
<pre><code>##      ghcnDay     statArg  dailyPrcp numberOfDataPoints
## 1 2013-04-01 US1COBO0063 0.00000000                 15
## 2 2013-04-01 US1COBO0159 0.03580000                 15
## 3 2013-04-01 US1COBO0280 0.01710000                 15
## 4 2013-04-01 US1COBO0297 0.01710000                 15
## 5 2013-04-01 US1COBO0301 0.03345012                 15
## 6 2013-04-01 US1COBO0318 0.03580000                 15</code></pre>
</div>
<div id="comparing-daily-qpeqpf-versus-ghcn-d" class="section level3">
<h3>Comparing daily QPE/QPF versus GHCN-D</h3>
<p>The final step is to find the common data between the two datasets (the precipitation time series (<code>dailyData</code>) and the observed GHCN-D (<code>obsPrcp</code>)). This can be very fast if using data.table.</p>
<pre class="sourceCode r"><code class="sourceCode r"><span class="co">#usind data.table merge</span>
common &lt;-<span class="st"> </span>data.table:::<span class="kw">merge.data.table</span>(data.table::<span class="kw">as.data.table</span>(dailyData),
                                        data.table::<span class="kw">as.data.table</span>(obsPrcp),
                                        <span class="dt">by.x=</span><span class="kw">c</span>(<span class="st">&quot;ghcnDay&quot;</span>,<span class="st">&quot;statArg&quot;</span>),
                                        <span class="dt">by.y=</span><span class="kw">c</span>(<span class="st">&quot;Date&quot;</span>,<span class="st">&quot;siteIds&quot;</span>))
<span class="kw">head</span>(common)</code></pre>
<pre><code>##       ghcnDay     statArg  dailyPrcp numberOfDataPoints dailyGhcn qFlag
## 1: 2013-04-01 US1COBO0159 0.03580000                 15    -999.9    NA
## 2: 2013-04-01 US1COBO0318 0.03580000                 15    -999.9    NA
## 3: 2013-04-01 US1COBO0320 0.03345012                 15       0.0    NA
## 4: 2013-04-01 USS0005J42S 0.00000000                 15       2.5    NA
## 5: 2013-04-01 USW00094075 0.00000000                 15       3.0    NA
## 6: 2013-04-02 US1COBO0159 1.25950000                 24    -999.9    NA
##    element
## 1:    PRCP
## 2:    PRCP
## 3:    PRCP
## 4:    PRCP
## 5:    PRCP
## 6:    PRCP</code></pre>
<p>Call the <code>CalcStatCont</code> function and it returns all the requested statistics. The defaults are <code>numPaired</code> (number of paired data), <code>meanObs</code> (mean of observation data), <code>meanMod</code> (mean of model/forecast data), <code>pearsonCor</code> (Pearson correlation coefficient), <code>RMSE</code> (root mean square error), and <code>multiBias</code> (multiplicative bias). Here we want to get the statistics for each gauge, therefore, we need to group the data for each gauge. This can be done by defining <code>groupBy</code> to be the column name having siteIds, here <code>statArg</code>.</p>
<pre class="sourceCode r"><code class="sourceCode r">stat &lt;-<span class="st"> </span><span class="kw">CalcStatCont</span>(<span class="dt">DT =</span> common, <span class="dt">obsCol =</span> <span class="st">&quot;dailyGhcn&quot;</span>, <span class="dt">modCol =</span> <span class="st">&quot;dailyPrcp&quot;</span> , 
                     <span class="dt">obsMissing =</span> -<span class="fl">999.9</span>, <span class="dt">groupBy =</span> <span class="st">&quot;statArg&quot;</span>)</code></pre>
<p><img src="" width="600" height="600" /></p>
<pre class="sourceCode r"><code class="sourceCode r"><span class="co"># CalcStatCont will return a list having two elements of stat and plotList.</span>
<span class="kw">names</span>(stat)</code></pre>
<pre><code>## [1] &quot;stat&quot;     &quot;plotList&quot;</code></pre>
<pre class="sourceCode r"><code class="sourceCode r"><span class="co">#To check the statistics </span>
stat$stat</code></pre>
<pre><code>##       statArg numPaired  meanObs  meanMod pearsonCor      RMSE multiBias
## 1 US1COBO0320        16 1.937500 2.383607 0.88954721  2.416111 1.2302489
## 2 USS0005J42S        31 6.222581 5.061032 0.05276855 10.640351 0.8133333
## 3 USW00094075        31 7.064516 5.061032 0.08649885 12.650551 0.7164018
## 4 US1COBO0159        26 4.638462 3.041277 0.75336969  5.595999 0.6556650
## 5 US1COBO0318        10 6.240000 3.841030 0.62419711  6.713896 0.6155497</code></pre>
<p>If the <code>groupBy</code> is <code>NULL</code> then it will return four informative plots.</p>
<pre class="sourceCode r"><code class="sourceCode r">common2 &lt;-<span class="st"> </span>common[statArg ==<span class="st"> </span><span class="kw">unique</span>(statArg)[<span class="dv">1</span>]]
stat &lt;-<span class="st"> </span><span class="kw">CalcStatCont</span>(<span class="dt">DT =</span> common2, <span class="dt">obsCol =</span> <span class="st">&quot;dailyGhcn&quot;</span>, <span class="dt">modCol =</span> <span class="st">&quot;dailyPrcp&quot;</span>, <span class="dt">obsMissing =</span> -<span class="fl">999.9</span>, <span class="dt">title =</span> common2$statArg)</code></pre>
<pre><code>## Warning: Removed 22 rows containing missing values (geom_smooth).</code></pre>
<p><img src="" width="600" height="600" /></p>
<p>You can choose among the four plots by changing the <code>plot.list</code> argument.</p>
<pre class="sourceCode r"><code class="sourceCode r">stat &lt;-<span class="st"> </span><span class="kw">CalcStatCont</span>(<span class="dt">DT =</span> common2, <span class="dt">obsCol =</span> <span class="st">&quot;dailyGhcn&quot;</span>, <span class="dt">modCol =</span> <span class="st">&quot;dailyPrcp&quot;</span> , <span class="dt">obsMissing =</span> -<span class="fl">999.9</span>, <span class="dt">plot.list =</span> <span class="st">&quot;scatterPlot&quot;</span>)</code></pre>
<pre><code>## Warning: Removed 22 rows containing missing values (geom_smooth).</code></pre>
<p><img src="" width="600" height="600" /></p>
<p>You can also calculate conditional statistics by defining the boundaries you are interested in. For example, here we calculate the statistics conditioned on the observation to be greater than 1 mm.</p>
<pre class="sourceCode r"><code class="sourceCode r">stat &lt;-<span class="st"> </span><span class="kw">CalcStatCont</span>(<span class="dt">DT =</span> common2, <span class="dt">obsCol =</span> <span class="st">&quot;dailyGhcn&quot;</span>, <span class="dt">modCol =</span> <span class="st">&quot;dailyPrcp&quot;</span> , 
                     <span class="dt">obsCondRange =</span> <span class="kw">c</span>(<span class="dv">1</span>, <span class="ot">Inf</span>), <span class="dt">plot.list =</span> <span class="st">&quot;scatterPlot&quot;</span>)</code></pre>
<pre><code>## Warning: Removed 10 rows containing missing values (geom_smooth).</code></pre>
<p><img src="" width="600" height="600" alt="Scatter plot conditioned on observations greater than 1 mm" /></p>
</div>
<div id="calculate-statistics-over-rfcs" class="section level3">
<h3>Calculate statistics over RFCs</h3>
<p>Sometimes the verification result at the gauge location is not desired and we want to find the performance of a model over a domain or polygon. If you want to calculate statistics over RFCs, then use the <code>GetRfc</code> function. One can find out which RFC a gauge (point) falls in using <code>GetRfc</code>. You simply feed it a dataframe having at least two columns, <code>latitude</code> and <code>longitude</code>, and this function adds a column with the RFC name to the dataframe.</p>
<pre class="sourceCode r"><code class="sourceCode r"><span class="co"># add rfc name</span>
sg &lt;-<span class="st"> </span><span class="kw">GetRfc</span>(sg)

<span class="co"># check what has been added</span>
<span class="kw">head</span>(sg)</code></pre>
<pre><code>##   country network stationID elevation state                           name
## 1      US       1  COBO0063    3026.1    CO NEDERLAND 5.2 NNW             
## 2      US       1  COBO0159    2104.9    CO BOULDER 3.6 WNW               
## 3      US       1  COBO0280    2319.2    CO BOULDER 7.0 W                 
## 4      US       1  COBO0297    2335.4    CO BOULDER 7.4 W                 
## 5      US       1  COBO0301    2031.8    CO BOULDER 6.7 WNW               
## 6      US       1  COBO0318    1931.8    CO BOULDER 5.9 W                 
##   GSNflag HCN.CRNflag WMOID     siteIds we sn reportTime   rfc longitude
## 1                           US1COBO0063  2  4        700 MBRFC -105.5442
## 2                           US1COBO0159 21  4        700 MBRFC -105.3275
## 3                           US1COBO0280 16  3        700 MBRFC -105.3842
## 4                           US1COBO0297 15  3        700 MBRFC -105.3918
## 5                           US1COBO0301 17  5        700 MBRFC -105.3741
## 6                           US1COBO0318 18  4        700 MBRFC -105.3612
##   latitude
## 1  40.0359
## 2  40.0395
## 3  40.0295
## 4  40.0305
## 5  40.0507
## 6  40.0394</code></pre>
<p>Now, add a column to the <code>common</code> data containing the <code>rfc</code> information for each record, and calculate the statistics grouped by RFC.</p>
<pre class="sourceCode r"><code class="sourceCode r"><span class="co"># merge the common data.table with the sg data.frame</span>
common &lt;-<span class="st"> </span>data.table:::<span class="kw">merge.data.table</span>(common,data.table::<span class="kw">as.data.table</span>(sg[, <span class="kw">c</span>(<span class="st">&quot;siteIds&quot;</span>, <span class="st">&quot;rfc&quot;</span>)]),
                                        <span class="dt">by.x=</span><span class="kw">c</span>(<span class="st">&quot;statArg&quot;</span>),
                                        <span class="dt">by.y=</span><span class="kw">c</span>(<span class="st">&quot;siteIds&quot;</span>))

<span class="co"># calculate statistics using grouping by rfc</span>
stat &lt;-<span class="st"> </span><span class="kw">CalcStatCont</span>(<span class="dt">DT =</span> common, <span class="dt">obsCol =</span> <span class="st">&quot;dailyGhcn&quot;</span>, <span class="dt">modCol =</span> <span class="st">&quot;dailyPrcp&quot;</span> , 
                     <span class="dt">groupBy =</span> <span class="st">&quot;rfc&quot;</span>, <span class="dt">obsMissing =</span> -<span class="fl">999.9</span>, <span class="dt">plot.it =</span> <span class="ot">FALSE</span>)

stat$stat</code></pre>
<pre><code>##     rfc numPaired  meanObs meanMod pearsonCor     RMSE multiBias
## 1 MBRFC       114 5.490351 4.11759  0.2803399 9.285539 0.7499684</code></pre>
<p>As you see above, all the gauges belong to one rfc (<code>MBRFC</code>), therefore there will be only one category.</p>
</div>
<div id="calculate-statistics-over-polygons" class="section level3">
<h3>Calculate statistics over polygons</h3>
<p>One can calculate the statistics over any desired polygon shapefile. First, you need to use the <code>GetPoly</code> function to find which polygon each point falls into. <code>GetPoly</code> takes a dataframe containing at least two fields, <code>latitude</code> and <code>longitude</code>, overlays the points with a <code>SpatialPolygonsDataFrame</code> and returns the requested attribute from the polygon. You can use an available <code>SpatialPolygon*</code> object loaded into memory or provide the address of the location of a polygon shapefile and the name of the shapefile. The clipped HUC12 shapefile is provided with the test case. The northeast of the clipped polygon partially covers the Fourmile Creek domain. Here, we try to find the corresponding polygon for each gauge and calculate the statistics over those polygons.</p>
<pre class="sourceCode r"><code class="sourceCode r"><span class="co"># add HUC12 ids</span>
polygonAddress &lt;-<span class="st"> </span><span class="kw">paste0</span>(<span class="kw">path.expand</span>(fcPath), <span class="st">&quot;/polygons&quot;</span>)
sg &lt;-<span class="st"> </span><span class="kw">GetPoly</span> (sg,  <span class="dt">polygonAddress =</span> polygonAddress,
               <span class="dt">polygonShapeFile =</span> <span class="st">&quot;clipped_huc12&quot;</span>,
               <span class="dt">join=</span><span class="st">&quot;HUC12&quot;</span>)

<span class="co"># check what has been added</span>
<span class="kw">head</span>(sg)

<span class="co"># merge the common data.table with the sg data.frame</span>
common &lt;-<span class="st"> </span>data.table:::<span class="kw">merge.data.table</span>(common,data.table::<span class="kw">as.data.table</span>(sg[, <span class="kw">c</span>(<span class="st">&quot;siteIds&quot;</span>,<span class="st">&quot;HUC12&quot;</span>)]),
                                        <span class="dt">by.x=</span><span class="kw">c</span>(<span class="st">&quot;statArg&quot;</span>),
                                        <span class="dt">by.y=</span><span class="kw">c</span>(<span class="st">&quot;siteIds&quot;</span>))

<span class="co"># calculate statistics using grouping by HUC12</span>
stat &lt;-<span class="st"> </span><span class="kw">CalcStatCont</span>(<span class="dt">DT =</span> common, <span class="dt">obsCol =</span> <span class="st">&quot;dailyGhcn&quot;</span>, <span class="dt">modCol =</span> <span class="st">&quot;dailyPrcp&quot;</span>, 
                     <span class="dt">obsMissing =</span> -<span class="fl">999.9</span>, <span class="dt">groupBy =</span> <span class="st">&quot;HUC12&quot;</span>, <span class="dt">plot.it =</span> <span class="ot">FALSE</span>)
stat$stat</code></pre>
<p>All the gauges with available data belong to one HUC12, therefore there is only one category.</p>
</div>
<div id="calculate-categorical-statistics" class="section level3">
<h3>Calculate categorical statistics</h3>
<p>You can also calculate some of the categorical statistics using the <code>CalcStatCategorical</code> function. It accepts both categorical variables and continuous ones. If the data is actually categorical, the variable <code>category</code> should be defined. The elements in <code>category</code> will be used as <code>YES</code> and <code>NO</code> in the contingency table. If the data is numeric, then a set of thresholds should be defined. Values exceeding the threshold will be flagged as <code>YES</code> and the values below the threshold are considered <code>NO</code>. You can choose from the available statistics by changing the <code>statList</code> argument. By default, it calculates the Hit Rate (H), False Alarm Ratio (FAR) and Critical Success Index (CSI). The grouping option is similar to <code>CalcStatCont</code>.</p>
<pre class="sourceCode r"><code class="sourceCode r"><span class="co"># calculate categorical statistics</span>
stat &lt;-<span class="st"> </span><span class="kw">CalcStatCategorical</span>(<span class="dt">DT =</span> common, <span class="dt">obsCol =</span> <span class="st">&quot;dailyGhcn&quot;</span>, <span class="dt">modCol =</span> <span class="st">&quot;dailyPrcp&quot;</span>, 
                            <span class="dt">obsMissing =</span> -<span class="fl">999.9</span>, <span class="dt">groupBy =</span> <span class="st">&quot;statArg&quot;</span>, <span class="dt">threshold =</span> <span class="kw">c</span>(<span class="dv">1</span>,<span class="dv">5</span>))
stat</code></pre>
<pre><code>##        statArg threshold         H       FAR       CSI
## 1  US1COBO0159         1 0.8888889 0.3333333 0.6153846
## 2  US1COBO0318         1 0.8333333 0.0000000 0.8333333
## 3  US1COBO0320         1 0.6666667 0.6666667 0.2857143
## 4  USS0005J42S         1 0.8181818 0.2173913 0.6666667
## 5  USW00094075         1 0.7777778 0.3913043 0.5185185
## 6  US1COBO0159         5 0.5714286 0.2000000 0.5000000
## 7  US1COBO0318         5 0.5000000 0.0000000 0.5000000
## 8  US1COBO0320         5 1.0000000 0.3333333 0.6666667
## 9  USS0005J42S         5 0.3571429 0.5000000 0.2631579
## 10 USW00094075         5 0.5000000 0.4000000 0.3750000</code></pre>
</div>
</div>



<!-- dynamically load mathjax for compatibility with self-contained -->
<script>
  // Inject MathJax at runtime so the page renders math even when the
  // document is saved as a self-contained file (no hard-coded script tag).
  (function () {
    var loader = document.createElement("script");
    loader.type = "text/javascript";
    loader.src  = "https://mathjax.rstudio.com/latest/MathJax.js?config=TeX-AMS-MML_HTMLorMML";
    // Append to <head> so it loads like a normal external script.
    document.getElementsByTagName("head")[0].appendChild(loader);
  })();
</script>

</body>
</html>
