require(Rniftilib)
require(bigmemory)
require(vegan)
require(foreach)

source("base.R")
source("connectivity.R")

# Default is to allow parallel computing if setup
# options(allowParallel=FALSE) if want otherwise
options(allowParallel=TRUE)

###
# Kendall's W
# - Compare similarity of correlation maps at each voxel across subjects
###

# Take as input conlist, output filename, to.write
# get voxmap at each point and run kendall's W
# return it as a nifti object

# REHO
# look at old code about how to do this

# homotopic connectivity

# alff

# sd of time-series


###
# Subject Correlation Map Distances
###

# Register the S3 "nifti" class (from Rniftilib) so it can be used as an
# S4 slot type below.
setOldClass("nifti")

# S4 class wrapping a bigmemory big.matrix of subject correlation-map
# distances. Slots:
#   address - externalptr to the underlying big.matrix data
#   nsubs   - number of subjects
#   nvoxs   - number of non-zero mask voxels (columns of the matrix)
#   coords  - data.frame of voxel coordinates (with a $val mask column)
#   ref     - reference nifti image used for writing results back out
setClass(
    Class = "neurosdist",
    representation = representation(
        address = "externalptr",
        nsubs = "numeric",
        nvoxs = "numeric",
        coords = "data.frame",
        ref = "nifti"
    )
)
# Declare that a neurosdist can be treated as a big.matrix (shares the
# address slot), so big.matrix methods like `[` and ncol() dispatch on it.
setIs("neurosdist", "big.matrix")

neurosdist <- function(conlist, mask=NULL, ref=NULL, backingprefix=NULL,
                       verbose=TRUE, overwrite=FALSE, ...) {
    # Build a "neurosdist" object: an (nSubs^2 x nVoxs) big.matrix whose
    # column v holds the vectorized pairwise distance matrix (1 - cor)
    # between the subjects' correlation maps seeded at voxel v.
    #
    # Args:
    #   conlist:       list of per-subject connectivity objects; each must be
    #                  indexable as conlist[[s]][, v] and (for the defaults)
    #                  carry @ref and @coords slots
    #   mask:          optional mask (filename or nifti) selecting voxels;
    #                  defaults to the first subject's @coords
    #   ref:           optional reference image (filename or nifti); defaults
    #                  to the first subject's @ref
    #   backingprefix: optional path prefix for file-backed storage; when
    #                  NULL, the big.matrix is kept in (shared) memory only
    #   verbose:       show a progress bar?
    #   overwrite:     recompute even if a backing file already exists?
    #   ...:           extra arguments passed through to cor()
    #                  (e.g. method="spearman")
    #
    # Returns: a "neurosdist" S4 object wrapping the big.matrix.
    
    # 1. Read in reference
    if (is.null(ref))
        ref <- conlist[[1]]@ref
    else if (is.character(ref))
        ref <- nifti.image.read(abspath(ref))
    else if (!inherits(ref, "nifti"))
        stop("Unrecognized type/class for ref argument: ", class(ref))
    
    
    # 2. Read in mask
    if (is.null(mask))
        coords <- conlist[[1]]@coords
    else if (is.character(mask) || inherits(mask, "nifti"))
        coords <- niGetCoords(mask)
    else
        stop("Unrecognized type/class for mask argument: ", class(mask))
    
    
    # 3. Number of subjects and non-zero voxels
    nSubs <- length(conlist)
    nVoxs <- sum(coords$val)
    
    # 4. Create big matrix
    ## each column represents a voxel
    ## and contains all possible pairwise distances (1-cor)
    ## between subjects correlation maps at that voxel
    if (is.null(backingprefix)) {
        bigmat <- big.matrix(nSubs^2, nVoxs, init=0)
    } else {
        backingprefix <- rmext(abspath(backingprefix))
        backingpath <- dirname(backingprefix)
        backingfile <- sprintf("%s.bin", basename(backingprefix))
        descriptorfile <- sprintf("%s.desc", basename(backingprefix))
        neurosdistfile <- sprintf("%s.neurosdist", basename(backingprefix))
        
        if (file.exists(file.path(backingpath, backingfile))) {
            warning("Subject cormap distances already exist")
            if (overwrite == FALSE) {
                message("Returning previous values")
                return(attach.neurosdist(backingprefix))
            }
        }
        
        bigmat <- big.matrix(nSubs^2, nVoxs, init=0, backingpath=backingpath,
                             backingfile=backingfile,
                             descriptorfile=descriptorfile)
        # Save metadata needed by attach.neurosdist() to rebuild the object
        dput(list(nsubs=nSubs, coords=coords, ref=ref$fname),
             file.path(backingpath, neurosdistfile))
    }
    
    # BUGFIX: create the progress bar only after the possible early return
    # above, so it is not left dangling when cached results are returned.
    if (verbose) {
        pb = create.progressbar(nVoxs)
        update.progressbar(pb, 0)
    }
    
    # 5. Loop through voxels
    ##   and compute distance between subject correlation maps at each voxel
    
    # Check that dimensions match across connectivity matrices
    # BUGFIX: guard against nSubs == 1, where 2:nSubs would yield c(2, 1)
    templatedim <- dim(conlist[[1]])
    if (nSubs > 1) {
        for (s in 2:nSubs)
            if (!all(templatedim == dim(conlist[[s]])))
                stop(sprintf("Dimensions for subject #1 do not match with subject #%i\n", s))
    }
    
    # Create matrix with each column representing a subject's
    # correlation map from voxel v with the rest of the brain
    # Use this on each loop
    voxmaps = matrix(NA, nVoxs, nSubs)
    
    ## note that this function makes use of variables outside of the function
    ## (voxmaps, bigmat, pb, and the enclosing ...); writes to bigmat are
    ## visible across workers because big.matrix uses shared memory
    fundist <- function(v) {
        for (s in seq_len(nSubs))
            voxmaps[,s] <- conlist[[s]][,v]
        
        # column v = flattened nSubs x nSubs matrix of (1 - correlation)
        bigmat[,v] <- as.vector(1 - cor(voxmaps, ...))
        
        if (verbose)
            update.progressbar(pb, v)
        
        invisible(NULL)
    }
    
    if (getOption("allowParallel") && getDoParRegistered())
        foreach(v=seq_len(nVoxs)) %dopar% fundist(v)
    else
        lapply(seq_len(nVoxs), fundist)
    
    if (verbose)
        end.progressbar(pb)
    
    # BUGFIX: populate the nvoxs slot (previously left at its empty default)
    return(new("neurosdist", address=bigmat@address, nsubs=nSubs,
               nvoxs=nVoxs, coords=coords, ref=ref))
}

attach.neurosdist <- function(backingprefix) {
    # Re-attach a file-backed neurosdist previously created by
    # neurosdist(..., backingprefix=...).
    #
    # Args:
    #   backingprefix: path prefix of the backing files (extension ignored)
    #
    # Returns: a "neurosdist" S4 object wrapping the attached big.matrix.
    
    # Setup Paths
    backingprefix <- rmext(abspath(backingprefix))
    backingpath <- dirname(backingprefix)
    backingbase <- basename(backingprefix)
    descriptorfile <- file.path(backingpath, sprintf("%s.desc", backingbase))
    neurosdistfile <- file.path(backingpath, sprintf("%s.neurosdist", backingbase))
    
    # Attach the shared matrix and read back the saved metadata
    bigmat <- attach.big.matrix(descriptorfile)
    nlist <- dget(neurosdistfile)
    
    # BUGFIX: populate the nvoxs slot (previously left at its empty default);
    # each column of the matrix is one voxel, so nvoxs == ncol(bigmat)
    return(new("neurosdist", address=bigmat@address, nsubs=nlist$nsubs,
               nvoxs=ncol(bigmat), coords=nlist$coords,
               ref=nifti.image.read(nlist$ref)))
}


###
# Distance-Based Multiple Regression
###

adonis.neurosdist <- function(x, xformula, xmodel, nSubs=NULL, permutations=4999, verbose=TRUE, ...) {
    # Run a distance-based multivariate regression (vegan::adonis) at every
    # voxel. Column v of x is unfolded into an nSubs x nSubs matrix of
    # between-subject distances and regressed on the model terms.
    #
    # Args:
    #   x:            a neurosdist object, or a plain matrix of flattened
    #                 distance matrices (one voxel per column)
    #   xformula:     character RHS of the model formula (e.g. "age + group")
    #   xmodel:       data.frame of model variables, one row per subject
    #   nSubs:        number of subjects; required when x is a plain matrix
    #   permutations: number of permutations for adonis
    #   verbose:      show a progress bar?
    #   ...:          passed on to adonis()
    #
    # Returns: a list of adonis results, one per voxel.
    
    if (inherits(x, "neurosdist")) {
        nSubs <- x@nsubs
    } else if (is.matrix(x)) {
        # BUGFIX: a matrix WITH nSubs supplied previously fell through to
        # the final stop(); now it is accepted.
        if (is.null(nSubs))
            stop("If input is matrix, must supply nSubs")
    } else {
        stop("Input must be a matrix or neurosdist object")
    }
    
    nVoxs <- ncol(x)
    nCores <- getOption("cores")
    
    if (verbose) {
        pb = create.progressbar(nVoxs)
        update.progressbar(pb, 0)
    }
    
    # Build the formula string once; the formula object itself is created
    # inside adfun (see BUGFIX note below)
    xformula.str <- sprintf("maps ~ %s", xformula)
    
    adfun <- function(v) {
        if (verbose)
            update.progressbar(pb, v)
        
        # unfold column v back into an nSubs x nSubs distance matrix
        tmp <- x[,v]
        dim(tmp) <- c(nSubs, nSubs)
        maps <- as.dist(tmp)
        
        # BUGFIX: construct the formula here so that environment(formula)
        # is adfun's frame, where `maps` is defined; previously the formula
        # was built in the enclosing frame, where adonis could not resolve
        # the `maps` response.
        adonis(as.formula(xformula.str), data=xmodel, permutations=permutations, ...)
    }
    
    if (getOption("allowParallel") && getDoParRegistered() && !is.null(nCores))
        adonislist <- foreach(i=seq_len(nVoxs), .packages=c("vegan", "bigmemory")) %dopar% adfun(i)
    else
        adonislist <- lapply(seq_len(nVoxs), adfun)
    
    # BUGFIX: pb only exists when verbose=TRUE; calling end.progressbar
    # unconditionally errored when verbose=FALSE
    if (verbose)
        end.progressbar(pb)
    
    return(adonislist)
}

## DON'T USE BELOW!!!
foreach.adonis.neurosdist <- function(nCores, x, ...) {
    # Split the voxel columns of x into ~nCores contiguous chunks and run
    # adonis.neurosdist on each chunk in parallel, concatenating the
    # per-voxel result lists.
    #
    # Args:
    #   nCores: number of chunks / worker cores to use
    #   x:      a neurosdist object
    #   ...:    passed on to adonis.neurosdist (e.g. xformula, xmodel)
    #
    # Returns: a list of adonis results, one per voxel.
    
    if (!inherits(x, "neurosdist"))
        stop("Input 'x' must be a neurosdist object")
    
    nVoxs <- ncol(x)
    nSubs <- x@nsubs
    
    ## want to split matrix into contiguous, NON-overlapping pieces
    ## that are passed to adonis
    blocksize <- ceiling(nVoxs/nCores)
    starts <- seq(1, nVoxs, by=blocksize)
    # BUGFIX: each block must end one voxel before the next block starts;
    # previously ends == starts[-1], so the boundary voxels were processed
    # twice and duplicated in the combined output.
    ends <- c(starts[-1] - 1, nVoxs)
    n <- length(starts)
    
    if (getDoParRegistered()) {
        # NOTE(review): this sets options(cores) globally and never restores
        # it on exit -- confirm that side effect is intended
        options(cores=nCores)
        adonislist <- foreach(i=seq_len(n), .combine="c", .packages=c("vegan", "bigmemory")) %dopar%
            adonis.neurosdist(x[,starts[i]:ends[i]], nSubs=nSubs, ...)
    } else {
        # no parallel backend registered: fall back to a single serial run
        adonislist <- adonis.neurosdist(x, ...)
    }
    
    return(adonislist)
}




