## CHECK WITH TREVOR: why alias here for predict.cv.relaxed as well as in plot.cv.relaxed?
##

#' Make predictions from a "cv.glmnet" object.
#'
#' This function makes predictions from a cross-validated glmnet model, using
#' the stored \code{"glmnet.fit"} object, and the optimal value chosen for
#' \code{lambda} (and \code{gamma} for a 'relaxed' fit).
#'
#' This function makes it easier to use the results of cross-validation to make
#' a prediction.
#'
#' @aliases coef.cv.glmnet coef.cv.relaxed predict.cv.glmnet
#' @param object Fitted \code{"cv.glmnet"} or \code{"cv.relaxed"} object.
#' @param newx Matrix of new values for \code{x} at which predictions are to be
#' made. Must be a matrix; can be sparse as in \code{Matrix} package. See
#' documentation for \code{predict.glmnet}.
#' @param s Value(s) of the penalty parameter \code{lambda} at which
#' predictions are required. Default is the value \code{s="lambda.1se"} stored
#' on the CV \code{object}. Alternatively \code{s="lambda.min"} can be used. If
#' \code{s} is numeric, it is taken as the value(s) of \code{lambda} to be
#' used. (For historical reasons we use the symbol 's' rather than 'lambda' to
#' reference this parameter)
#' @param \dots Other arguments passed on to \code{predict.glmnet}.
#' @return The object returned depends on the \dots{} argument which is passed
#' on to the \code{predict} method for \code{glmnet} objects.
#' @author Jerome Friedman, Trevor Hastie and Rob Tibshirani\cr Maintainer:
#' Trevor Hastie <hastie@@stanford.edu>
#' @seealso \code{glmnet}, and \code{print}, and \code{coef} methods, and
#' \code{cv.glmnet}.
#' @references Friedman, J., Hastie, T. and Tibshirani, R. (2008)
#' \emph{Regularization Paths for Generalized Linear Models via Coordinate
#' Descent, Journal of Statistical Software, Vol. 33, Issue 1, Feb 2010}\cr
#' \url{https://www.jstatsoft.org/v33/i01/}
#' \url{https://arxiv.org/abs/1707.08692}\cr Hastie, T., Tibshirani, Robert,
#' Tibshirani, Ryan (2019) \emph{Extended Comparisons of Best Subset Selection,
#' Forward Stepwise Selection, and the Lasso}
#' @keywords models regression
#' @examples
#'
#' x = matrix(rnorm(100 * 20), 100, 20)
#' y = rnorm(100)
#' cv.fit = cv.glmnet(x, y)
#' predict(cv.fit, newx = x[1:5, ])
#' coef(cv.fit)
#' coef(cv.fit, s = "lambda.min")
#' predict(cv.fit, newx = x[1:5, ], s = c(0.001, 0.002))
#' cv.fitr = cv.glmnet(x, y, relax = TRUE)
#' predict(cv.fitr, newx = x[1:5, ])
#' coef(cv.fitr)
#' coef(cv.fitr, s = "lambda.min", gamma = "gamma.min")
#' predict(cv.fitr, newx = x[1:5, ], s = c(0.001, 0.002), gamma = "gamma.min")
#'
#' @method predict cv.glmnet
#' @export
predict.cv.glmnet <- function(object, newx, s = c("lambda.1se", "lambda.min"), ...) {
  # Resolve the penalty value(s) to predict at: a numeric `s` is used as-is,
  # while a character `s` is matched against the allowed choices and looked
  # up in the stored cross-validation results (e.g. object$lambda.1se).
  lam <- if (is.numeric(s)) {
    s
  } else if (is.character(s)) {
    object[[match.arg(s)]]
  } else {
    stop("Invalid form for s")
  }
  # Delegate to the predict method of the stored glmnet fit.
  predict(object$glmnet.fit, newx, s = lam, ...)
}
