# Interactive development setup.  Wrapped in `if (FALSE)` so nothing here runs
# when the file is source()'d as part of the package; during development,
# execute the body by hand to attach dependencies, load the helper sources and
# the compiled shared object, and initialize the C side via .Call("init").
if (FALSE){
library(mlogit)
library(plyr)
library(reshape2)
#library(lme4)
library(rbenchmark)
library(Matrix)
library(MatrixModels)

# NOTE(review): machine-specific path and setwd() — acceptable only because
# this block is dev-only and never executed automatically.
setwd("~/Dropbox/MNL FML")
source("R/classes_methods.R")
source("R/utilities.R")
source("R/getFrame.R")
source("R/getData.R")
dyn.load("src/nrmm.so")
.Call("init")
}

#'
#' The high dimensional logit model using sparse matrix
#'
#' @param formula the input model formula
#' @param data input data of class mlogit
#' @param weights an optional numeric vector of observation weights; not
#' supported when \code{ncluster > 1}
#' @param ncluster number of latent clusters; a value greater than 1 fits a
#' mixture model
#' @param subject an optional subject identifier
#' @param doFit if \code{FALSE}, the constructed model object is returned
#' without fitting
#' @param control a list of control parameters, see \code{rmm.control}
#' @param scales whether the design matrix should be rescaled before fitting
#'
#' @return  an object of class nrmm
#'

nrmm <- function(formula, data, weights = NULL, ncluster = 1, subject = NULL,
                 doFit = TRUE, control = rmm.control(), scales = FALSE){

  # capture the call before touching any argument, so it is stored verbatim
  mc <- match.call()

  # weighted estimation is only available for the single-cluster model
  if (!is.null(weights) && ncluster > 1)
    stop("'weights' are not supported for mixture models.")
  #check.data(data)

  # build the model frame and the (possibly rescaled) design matrix
  fr <- getFrame(mc, formula, control["use.contrast"])
  if (scales){
    sx <- scaleX(fr$X)
    X <- sx$X
    attr(X, "scales") <- sx$R
  } else{
    X <- fr$X
  }

  # dimension slot: observations, coefficients, panels, choices, groups,
  # levels, subjects and clusters
  dims <- structure(as.integer(c(nrow(X), ncol(X), nlevels(fr$id),
                                 nlevels(fr$alt), length(fr$grp) - 1, 0,
                                 max(fr$subject) + 1, ncluster)),
                    names = c("nO", "nB", "nP", "nC", "nG", "nL", "nI", "nS"))

  # Cholesky factor: an empty placeholder when Newton steps are used,
  # otherwise the LL' factorization of X'X
  if (control["newton"]){
    L <- new("dCHMsimpl")
  } else{
    L <- Cholesky(crossprod(X), super = FALSE, LDL = FALSE)
  }

  ans <- new("nrmm", call = mc, X = X, y = fr$y, dims = dims, L = L,
             wts = fr$wts, control = control, frame = fr$mf,
             subject = fr$subject)

  # return the unfitted object when only the model skeleton is wanted
  if (!doFit)
    return(ans)

  # run the C-level optimizer; it updates `ans` in place
  .Call("R_do_nrmm", ans)

  if (ans@control["cvg"] != 0)
    warning("Algorithm does not converge!")

  ans
}

#' Simulate a response vector from a model object's choice probabilities
#'
#' Refreshes the probability slot of \code{ans} in place through the compiled
#' routine, then draws one multinomial outcome per choice set.
#'
#' @param ans an object of class nrmm; its \code{dims} slot supplies the
#'   number of choices per set (nC) and the number of sets (nP), and its
#'   \code{mu} slot holds the fitted probabilities after the update call.
#' @return a numeric 0/1 vector of length nC * nP, one indicator block of
#'   length nC per choice set.
simulateY <- function(ans){
  # C-side side effect: recompute ans@mu from the current coefficients
  .Call("R_rmm_update_prob", ans)
  dims <- ans@dims
  prob <- ans@mu
  nC <- dims["nC"]
  nP <- dims["nP"]
  # seq_len(), not 1:nP — with nP == 0 the old form produced c(1, 0) and
  # indexed out of bounds; seq_len() correctly yields an empty sequence
  yl <- lapply(seq_len(nP), function(x){
    idx <- ((x - 1) * nC + 1):(x * nC)
    as.numeric(rmultinom(1, 1, prob = prob[idx]))
  })
  unlist(yl)
}

# Manual regression tests, disabled for normal sourcing.  Use `if (FALSE)`
# (not `if (0)`) for consistency with the setup block above and idiomatic R.
# Each numbered section compares nrmm() against mlogit() on simulated or
# packaged data; run sections by hand during development.
if (FALSE){

# run tests 
# 1) simulate data  
# generate data
set.seed(11)
tmp <- nrmm(choice ~x2+x3|x4|x1, data = da, doFit = FALSE)
beta <- rnorm(ncol(tmp@X), sd = 1)
da$choice <- simulateY(tmp)

f1 <- mlogit(choice ~ x2+x3|x4|x1, data = da)
f2 <- nrmm(choice ~x2+x3|x4|x1, data = da)
f3 <- nrmm(choice ~x2+x3|x4|x1, data = da, 
          control = rmm.control(newton = FALSE))
head(as.numeric(f1$coefficients))
head(as.numeric(f2@beta))
head(as.numeric(f3@beta))
head(beta)

# standard errors from the Hessian should match mlogit's vcov
V <- solve(f2@hess)  
head(sqrt(diag(V)))
head(sqrt(as.numeric(diag(vcov(f1)))))

# 2) Use the Fishing data
data("Fishing", package = "mlogit")
data <- mlogit.data(Fishing, varying = c(2:9), shape = "wide", choice = "mode")
data <- transform(data, price = scale(price),
                  catch = scale(catch), 
                  income = scale(income))
data <- mlogit.data(data, choice = "mode", shape = "long", alt.var = "alt")

formula <- mode ~price + catch
formula <- mode ~0||price
formula <- mode ~price + catch|income

f1 <- mlogit(formula, data = data)
f2 <- nrmm(formula, data = data)
f3 <- nrmm(formula, data = data,
           control = rmm.control(newton = FALSE))
as.numeric(f1$coefficients)
as.numeric(f2@beta)
as.numeric(f3@beta)

# 3) model with pre-specified weights
set.seed(11)
wts <- rlnorm(max(data$chid))
wts <- wts[data$chid]
formula <- mode ~price + catch|income
f1 <- mlogit(formula, data = data, weights = wts)
f2 <- nrmm(formula, data = data, weights = wts)
f3 <- nrmm(formula, data = data, weights = wts,
           control = rmm.control(newton = FALSE))
as.numeric(f1$coefficients)
as.numeric(f2@beta)
as.numeric(f3@beta)


# 4) simulate data using the attributes decomp
tmp <- nrmm(choice ~0 + z1+z2 + x1 + x2 + x3, data = da, doFit = FALSE)
beta <- rnorm(ncol(tmp@X), sd = 1)
da$choice <- simulateY(tmp)

mc <- as.call(list(formula = formula, data = da))
f2 <- nrmm(choice ~0 + z1+z2 + x1 + x2 + x3, data = da)
f3 <- nrmm(choice ~0 + z1+z2 + x1 + x2 + x3, data = da,
          control = rmm.control(newton = FALSE))
head(as.numeric(f2@beta))
head(as.numeric(f3@beta))
V <- solve(f2@hess)  
head(sqrt(diag(V)))

# 5) simulate mixture data: two segments with distinct coefficient vectors,
# membership drawn per choice-set with probability `pi`
S <- 2
tmp <- nrmm(mode ~price + catch, data = data, doFit = FALSE)
pi <- 0.3
dims <- tmp@dims
beta1 <- c(0.5, 0.5, -0.1,  -1, 0.5)#rnorm(dims["nB"])
beta2 <- c(-1, -0.5, 0, 1, -2)#rnorm(dims["nB"])
set.seed(13)
tmp@beta <- matrix(beta1)
choice1 <- simulateY(tmp)
tmp@beta <- matrix(beta2)
choice2 <- simulateY(tmp)  
choice <- choice1  
seg <- rbinom(max(data$chid), 1, pi)
sid2 <- which(seg == 0)  
idx <- data$chid %in% sid2
choice[idx] <- choice2[idx]
data$mode  <- choice

f1 <- nrmm(mode ~price + catch, data = data, ncluster = 2, 
           control = rmm.control(EM.iter= 500, newton=FALSE))
f1@beta
f1@pi

f2 <- nrmm(mode ~price + catch, data = data, ncluster = 2, 
           control = rmm.control(EM.iter= 500, newton=TRUE))
f2@beta
f2@pi

# use TRUE, never the reassignable shorthand T
f3 <- mlogitSeg(mode ~price + catch, data = data, nseg = 2, idName = "chid", trace = TRUE)

}