####################################################################
# Auxiliary Functions
####################################################################

################################################################
# Arguments:
#   y.k(ns.k, nt): observed data on original scale
#   mu.y.k(ns.k, nt): E(Y)
#   r.inv.k(nt): vector of random effects for each day
#
# Returns:
#   z(ns.k, nt): standardized data
################################################################
Y2Z <- function(y.k, mu.y.k, r.inv.k){
  # Standardize observations: z = (y - E[Y]) * sqrt(precision).
  # (Removed unused locals ns/nt from the original.)
  #
  # Args:
  #   y.k(ns.k, nt): observed data on original scale
  #   mu.y.k(ns.k, nt): E(Y)
  #   r.inv.k: random-effect precision term (variance scale).
  #            NOTE(review): a length-nt vector would recycle DOWN columns
  #            (within a day), not across days -- confirm callers pass a
  #            scalar or a conforming matrix.
  #
  # Returns:
  #   z(ns.k, nt): standardized data
  z <- (y.k - mu.y.k) * sqrt(r.inv.k)

  return(z)
}

################################################################
# Arguments:
#   z(ns.k, nt): standardized data 
#   mu.y.k(ns.k, nt): E(Y)
#   r.inv.k(nt): vector of random effects for each day
#
# Returns:
#   y(ns.k, nt): data back in original scale
################################################################
Z2Y <- function(z, mu.y.k, r.inv.k){
  # Inverse of Y2Z: map standardized data back to the original scale,
  # y = z / sqrt(precision) + E[Y].
  # (Removed unused locals ns/nt from the original.)
  #
  # Args:
  #   z(ns.k, nt): standardized data
  #   mu.y.k(ns.k, nt): E(Y)
  #   r.inv.k: random-effect precision term (variance scale).
  #            NOTE(review): a length-nt vector would recycle down columns,
  #            not across days -- confirm callers pass a scalar or a
  #            conforming matrix (same caveat as Y2Z).
  #
  # Returns:
  #   y(ns.k, nt): data back on the original scale
  y <- z / sqrt(r.inv.k) + mu.y.k

  return(y)
}

################################################################
# Arguments:
#   s(ns, 2): spatial locations
#   knots(nknots, 2, nt): knot locations
#	y(ns, nt): observed data
#   x(ns, nt, p): array of covariates
#   nt(1): number of days
#
# Returns:
#   list: 
#     y.by.knots[nt][nknots]: list of observed data vectors(ns.k)
#     x.by.knots[nt][nknots]: list of covariate matrices(ns.k, p)
#     partition(ns, nt): matrix of partition membership
################################################################
Membership <- function(s, knots, y, x){
  # Assign every site to its nearest knot on each day, then split the
  # observations and covariates by partition cell.
  #
  # Args:
  #   s(ns, 2): spatial locations
  #   knots(nknots, 2, nt): knot locations
  #   y(ns, nt): observed data
  #   x(ns, nt, p): array of covariates
  #
  # Returns:
  #   list:
  #     partition(ns, nt): index of the nearest knot per site and day
  #     y.by.knots[[t]][[k]]: observed data vector(ns.k) for cell k, day t
  #     x.by.knots[[t]][[k]]: covariate matrix(ns.k, p) for cell k, day t
  #     (empty cells hold NA)
  nt     <- ncol(y)
  ns     <- nrow(s)
  nknots <- dim(knots)[1]
  p      <- dim(x)[3]

  y.by.knots <- vector("list", nt)
  x.by.knots <- vector("list", nt)
  partition  <- matrix(NA, ns, nt)  # nearest-knot index per site and day

  for (t in seq_len(nt)) {
    # distance from every site to every knot on day t; matrix() guards the
    # nknots == 1 case where knots[, , t] would drop to a vector
    knots.t <- matrix(knots[, , t], nknots, 2)
    d.t     <- rdist(s, knots.t)
    partition[, t] <- apply(d.t, 1, which.min)

    y.t <- vector("list", nknots)
    x.t <- vector("list", nknots)
    for (k in seq_len(nknots)) {
      idx <- which(partition[, t] == k)
      n.k <- length(idx)
      if (n.k == 0) {
        # empty cell: NA placeholders
        y.t[[k]] <- NA
        x.t[[k]] <- NA
      } else {
        y.t[[k]] <- y[idx, t]
        # keep a 1-row matrix when a single site falls in the cell
        x.t[[k]] <- if (n.k == 1) {
          matrix(x[idx, t, ], nrow=1, ncol=p)
        } else {
          x[idx, t, ]
        }
      }
    }
    y.by.knots[[t]] <- y.t
    x.by.knots[[t]] <- x.t
  }

  return(list(partition=partition, y.by.knots=y.by.knots,
              x.by.knots=x.by.knots))
}

################################################################
# Arguments:
#   partition(ns, nt): matrix of partition membership
#   x.by.knots[nt][nknots]: list of covariate matrices(ns.k, p)
#	beta.y(p): parameters for E(y)
#   nknots(1): number of knots
#
# Returns:
#   mu.y[nt][nknots]: list of E(Y) vectors(ns.k)
################################################################
ExpectY <- function(partition, x.by.knots, beta.y, nknots){
  # Compute E(Y) = X %*% beta within every partition cell, for every day.
  #
  # Args:
  #   partition(ns, nt): matrix of partition membership
  #   x.by.knots[[t]][[k]]: covariate matrix(ns.k, p)
  #   beta.y(p): regression coefficients for E(Y)
  #   nknots(1): number of knots
  #
  # Returns:
  #   mu.y[[t]][[k]]: E(Y) vector(ns.k), or NA for an empty cell
  nt   <- ncol(partition)
  mu.y <- vector("list", nt)

  for (t in seq_len(nt)) {
    day <- vector("list", nknots)
    for (k in seq_len(nknots)) {
      in.cell <- which(partition[, t] == k)
      if (length(in.cell) == 0) {
        day[[k]] <- NA  # no sites in this cell today
      } else {
        day[[k]] <- x.by.knots[[t]][[k]] %*% beta.y
      }
    }
    mu.y[[t]] <- day
  }

  return(mu.y)
}

################################################################
# Arguments:
#   d(ns, ns): distance between observations
#   alpha(1): controls proportion of spatial to non-spatial 
#             covariance (0: ind, 1: high spatial corr)
#   rho(1): spatial range
#   nu(1): matern smoothness parameter
#	cov(bool): do we need a variance term
#
# Returns:
#   cor(ns, ns): matern correlation matrix
################################################################
CorFx <- function(d, alpha, rho, nu, cov=FALSE){
  # Matern correlation with an alpha-weighted independent component.
  # (Fixes the T/F anti-pattern: default is now spelled FALSE.)
  #
  # Args:
  #   d(ns, ns): distance between observations
  #   alpha(1): proportion of spatial to non-spatial covariance
  #             (0: independent, 1: high spatial correlation)
  #   rho(1): spatial range
  #   nu(1): matern smoothness parameter
  #   cov(bool): TRUE for a cross-covariance block (e.g. S12) that must
  #              not receive the diagonal variance term
  #
  # Returns:
  #   cor(ns, ns): matern correlation matrix
  ns  <- nrow(d)
  cor <- alpha * matern(d, rho, nu)
  if (!cov) {
    # put the remaining (1 - alpha) mass on the diagonal so the full
    # correlation matrix has unit diagonal
    cor <- cor + (1 - alpha) * diag(ns)
  }

  return(cor)
}

################################################################
# Arguments:
#   d(ns, ns): distance between observations
#   alpha(1): controls proportion of spatial to non-spatial 
#             covariance (0: ind, 1: high spatial corr)
#   rho(1): spatial range
#   nu(1): matern smoothness parameter
#   partition(ns, nt): matrix of partition membership
#   nknots(1): number of knots
#   eps(1): small amount for numerical stability
#
# Returns:
#   list: 
#     prec[nt][nknots]: list of precision matrices(ns.k, ns.k)
#     log.det(nknots, nt): logdet(prec)
#     sig[nt][nknots]: list of correlation matrices(ns.k, ns.k)
################################################################
SpatCor <- function(d, alpha, rho, nu=0.5, partition, nknots, eps=10^(-5)){
  # Per-cell, per-day spatial correlation, precision, and log-determinant.
  #
  # Args:
  #   d(ns, ns): distance between observations
  #   alpha(1): proportion of spatial to non-spatial covariance
  #   rho(1): spatial range
  #   nu(1): matern smoothness parameter
  #   partition(ns, nt): matrix of partition membership
  #   nknots(1): number of knots
  #   eps(1): small floor on Cholesky pivots for numerical stability
  #
  # Returns:
  #   list:
  #     prec[[t]][[k]]: precision matrix(ns.k, ns.k)
  #     log.det(nknots, nt): logdet(prec)
  #     sig[[t]][[k]]: correlation matrix(ns.k, ns.k)
  nt <- ncol(partition)
  q <- sig <- vector("list", nt)
  # BUG FIX: log.det was rep(0, nknots) (a vector) but is indexed
  # [k, t] below, which errors; it must be an nknots x nt matrix.
  log.det <- matrix(0, nknots, nt)

  for (t in seq_len(nt)) {
    q[[t]] <- sig[[t]] <- vector("list", nknots)

    for (k in seq_len(nknots)) {
      these <- which(partition[, t] == k)
      ns.k  <- length(these)
      if (ns.k == 0) {
        q[[t]][[k]] <- sig[[t]][[k]] <- NA
      } else if (ns.k == 1) {
        # a single site has unit correlation/precision and logdet 0
        q[[t]][[k]] <- sig[[t]][[k]] <- matrix(1, 1, 1)
      } else {
        ddd       <- d[these, ]
        ddd       <- ddd[, these]
        sig.k     <- CorFx(ddd, alpha, rho, nu, cov=FALSE)
        sig.chol  <- chol(sig.k)
        # floor tiny pivots so the log-determinant stays finite
        diag.chol <- ifelse(diag(sig.chol) < eps, eps, diag(sig.chol))
        log.det.k <- 2 * sum(log(diag.chol))  # logdet(sig)

        # BUG FIX: logdet(prec) = -logdet(sig); the original stored
        # 1/logdet(sig), which is not a log-determinant at all
        log.det[k, t] <- -log.det.k
        sig[[t]][[k]] <- sig.k
        q[[t]][[k]]   <- chol2inv(sig.chol)
      }
    }
  }

  return(list(prec=q, log.det=log.det, sig=sig))
}

################################################################
# Arguments:
#   y.by.knots[nt][nknots]: list of observed data vectors(ns.k)
#   mu.y[nt][nknots]: list of E(Y) vectors(ns.k)
#	prec[nt][nknots]: list of precision matrices(ns.k, ns.k)
#   partition(ns, nt): matrix of partition membership
#   nt(1): number of days
#	nknots(1): number of knots
#
# Returns:
#   ss(nknots, nt): sum of squares for each partition per day
################################################################
SumSquares <- function(y.by.knots, mu.y, prec, partition,
                       nt, nknots){
  # Quadratic form (y - mu)' Q (y - mu) for each partition cell per day.
  #
  # Args:
  #   y.by.knots[[t]][[k]]: observed data vector(ns.k)
  #   mu.y[[t]][[k]]: E(Y) vector(ns.k)
  #   prec[[t]][[k]]: precision matrix(ns.k, ns.k)
  #   partition(ns, nt): matrix of partition membership
  #   nt(1): number of days
  #   nknots(1): number of knots
  #
  # Returns:
  #   ss(nknots, nt): sum of squares for each partition per day
  #                   (0 for empty cells)
  ss <- matrix(NA, nknots, nt)

  for (t in seq_len(nt)) {
    for (k in seq_len(nknots)) {
      idx <- which(partition[, t] == k)
      if (length(idx) == 0) {
        ss[k, t] <- 0  # empty cell contributes nothing
      } else {
        resid    <- y.by.knots[[t]][[k]] - mu.y[[t]][[k]]
        ss[k, t] <- t(resid) %*% prec[[t]][[k]] %*% resid
      }
    }
  }

  return(ss)
}

################################################################
# Arguments:
#   prec.beta(p, p): prior precision of beta
#   e.beta(p): prior mean of beta
#   x.by.knots[nt][nknots]: list of covariate matrices(ns.k, p)
#   y.by.knots[nt][nknots]: list of observed data vectors(ns.k)
#   prec[nt][nknots]: list of precision matrices(ns.k, ns.k)
#   partition(ns, nt): matrix of partition membership
#   r.inv(nknots, nt): matrix of random effects at each partition (var scale)
#   nt(1): number of days
#
# Returns:
#   list: 
#     vvv(p, p): posterior variance of beta
#     mmm(p): posterior mean of beta
################################################################
BetaPosterior <- function(prec.beta, e.beta, x.by.knots, y.by.knots,
                          prec, partition, r.inv, nt){
  # Accumulate the Gaussian full-conditional pieces for beta:
  # precision = prec.beta + sum_k,t X' Q X * r.inv, then invert it.
  #
  # Args:
  #   prec.beta(p, p): prior precision of beta
  #   e.beta(p): prior mean of beta
  #   x.by.knots[[t]][[k]]: covariate matrix(ns.k, p)
  #   y.by.knots[[t]][[k]]: observed data vector(ns.k)
  #   prec[[t]][[k]]: precision matrix(ns.k, ns.k)
  #   partition(ns, nt): matrix of partition membership
  #   r.inv(nknots, nt): random effects at each partition (variance scale)
  #   nt(1): number of days
  #
  # Returns:
  #   list:
  #     vvv(p, p): posterior variance of beta
  #     mmm(p): mean accumulator. NOTE(review): initialized to e.beta
  #             rather than prec.beta %*% e.beta -- confirm the caller's
  #             prior-precision convention matches this.
  nknots <- nrow(r.inv)
  p      <- length(e.beta)
  vvv    <- prec.beta  # running posterior precision
  mmm    <- e.beta     # running weighted-data accumulator

  for (t in seq_len(nt)) {
    for (k in seq_len(nknots)) {
      idx <- which(partition[, t] == k)  # sites in this cell
      n.k <- length(idx)
      if (n.k > 0) {
        # matrix() guards the 1-site case where the stored block may
        # not be a matrix of the expected shape
        x.k <- matrix(x.by.knots[[t]][[k]], n.k, p)
        xq  <- t(x.k) %*% prec[[t]][[k]] * r.inv[k, t]
        vvv <- vvv + xq %*% x.k
        mmm <- mmm + xq %*% y.by.knots[[t]][[k]]
      }
    }
  }

  # invert the accumulated precision via its Cholesky factor
  vvv <- chol2inv(chol(vvv))

  return(list(vvv=vvv, mmm=mmm))
}

################################################################
# Arguments:
#   ss(nknots, nt): sum of squares for each partition per day
#   log.det(nknots, nt): logdet(prec)
#   r.inv(nknots, nt): matrix of random effects at each partition (var scale)
#   partition(ns, nt): vector of partition membership
#
# Returns:
#   llike(nknots, nt): (log)likelihood
################################################################
LLike <- function(ss, log.det, r.inv, partition, log=TRUE){
  # Gaussian (log-)likelihood contribution of every partition cell per day:
  # 0.5*logdet(prec) + 0.5*ns.k*log(r.inv) - 0.5*ss*r.inv.
  #
  # Args:
  #   ss(nknots, nt): sum of squares for each partition per day
  #   log.det(nknots, nt): logdet(prec)
  #   r.inv(nknots, nt): random effects at each partition (variance scale)
  #   partition(ns, nt): matrix of partition membership
  #   log(bool): return log-likelihood (TRUE) or likelihood (FALSE)
  #
  # Returns:
  #   llike(nknots, nt): (log-)likelihood
  nknots <- nrow(r.inv)
  # BUG FIX: nt was never defined here and was silently read from the
  # global environment; derive it from r.inv instead
  nt <- ncol(r.inv)
  log.like <- matrix(NA, nknots, nt)

  for (t in seq_len(nt)) {
    for (k in seq_len(nknots)) {
      ns.k <- length(which(partition[, t] == k))  # sites in this cell
      r.inv.k <- r.inv[k, t]
      log.like[k, t] <- 0.5 * log.det[k, t] + 0.5 * ns.k * log(r.inv.k) -
                        0.5 * ss[k, t] * r.inv.k
    }
  }
  if (!log) {
    log.like <- exp(log.like)
  }

  return(log.like)
}

################################################################
# Arguments:
#   preds(yp, nt, iters): mcmc predictions at validation
#                         locations
#   probs(nprobs): sample quantiles for scoring
#   validate(np, nt): validation data
#
# Returns:
#   score(nprobs): a single quantile score per quantile
################################################################
QuantScore <- function(preds, probs, validate){
  # Mean quantile (pinball) score of the MCMC predictive draws against the
  # validation data, one score per requested quantile.
  #
  # Args:
  #   preds(np, nt, iters): mcmc predictions at validation locations
  #   probs(nprobs): sample quantiles for scoring
  #   validate(np, nt): validation data
  #
  # Returns:
  #   scores(nprobs): a single quantile score per quantile
  nt <- ncol(validate)
  np <- nrow(validate)
  nprobs <- length(probs)

  # nprobs x np: each site's predictive quantiles over all days/iterations
  pred.quants <- apply(preds, 1, quantile, probs=probs, na.rm=TRUE)
  if (!is.matrix(pred.quants)) {
    # BUG FIX: with a single prob, apply() collapses to a vector and
    # pred.quants[q, ] below would error; restore the nprobs x np shape
    pred.quants <- matrix(pred.quants, nrow=nprobs)
  }

  scores.sites <- array(NA, dim=c(nprobs, np, nt))

  for (q in seq_len(nprobs)) {
    # length-np row recycles down the columns (days) of validate
    diff <- pred.quants[q, ] - validate
    i <- ifelse(diff >= 0, 1, 0)
    scores.sites[q, , ] <- 2 * (i - probs[q]) * diff
  }

  scores <- apply(scores.sites, 1, mean, na.rm=TRUE)

  return(scores)
}

################################################################
# Arguments:
#   preds(yp, nt, iters): mcmc predictions at validation
#                         locations
#   probs(nthreshs): sample quantiles for scoring
#   validate(np, nt): validation data
#
# Returns:
#   scores(nthreshs): a single brier score per threshold, where the
#                     thresholds are the sample quantiles of the
#                     validation data at probs
################################################################
BrierScore <- function(preds, probs, validate){
  # Brier score of the predictive exceedance probabilities against the
  # observed exceedances, at thresholds taken as sample quantiles of the
  # validation data.
  #
  # Args:
  #   preds(np, nt, iters): mcmc predictions at validation locations
  #   probs(nthreshs): sample quantiles defining the thresholds
  #   validate(np, nt): validation data
  #
  # Returns:
  #   scores(nthreshs): a single brier score per threshold
  nthreshs <- length(probs)
  thresholds <- quantile(validate, probs=probs, na.rm=TRUE)

  scores <- rep(NA, nthreshs)
  for (b in seq_len(nthreshs)) {
    # P(pred > threshold) per site/day, averaged over mcmc iterations
    pat <- apply((preds > thresholds[b]), c(1, 2), mean)
    # BUG FIX: the observed indicator must match pat's exceedance
    # direction; the original compared P(Y > t) against I(y < t)
    ind <- validate > thresholds[b]
    scores[b] <- mean((ind - pat)^2, na.rm=TRUE)
  }

  return(scores)
}

################################################################
# Arguments:
#   s(ns, 2): spatial locations
#
# Returns:
#   s.scale(ns, 2): locations scaled to be in [0, 1] x [0, 1]
################################################################
ScaleLocs <- function(s){
  # Min-max rescale 2-d coordinates onto the unit square.
  #
  # Args:
  #   s(ns, 2): spatial locations
  #
  # Returns:
  #   s.scale(ns, 2): locations scaled to be in [0, 1] x [0, 1]
  rng.x <- range(s[, 1])
  rng.y <- range(s[, 2])

  s.x <- (s[, 1] - rng.x[1]) / (rng.x[2] - rng.x[1])
  s.y <- (s[, 2] - rng.y[1]) / (rng.y[2] - rng.y[1])

  # cbind on the named vectors keeps the original "s.x"/"s.y" colnames
  s.scale <- cbind(s.x, s.y)

  return(s.scale)
}