
# Hierarchical Bayesian fit of the DMC (diffusion model for conflict tasks)
# to the Servant flanker dataset, via differential-evolution MCMC with
# probability-density-approximation (PDA) likelihoods.

# NOTE(review): rm(list=ls()) wipes the caller's workspace; acceptable only
# because this file is run as a standalone batch script.
rm(list=ls())

# Loads the parsed experiment data -- presumably the per-subject list `data`
# plus S, conds and stims used throughout; confirm against the parsing script.
load("parsedData-servantFlanker.Rdata")

# Seed the RNG from the wall clock (runs are deliberately non-reproducible).
newSeed=Sys.time()
set.seed(as.numeric(newSeed))





# Simulate synthetic trials from the DMC model for every condition/stimulus
# cell, given a single subject-level parameter vector.
#
# n:         table of trial counts indexed by [condition, stimulus]
# x:         numeric parameter vector (unnamed); named here via par.names
# par.names: names the model code expects on the parameter vector
#
# Relies on globals: conds, stims, simulate.DMC, use.maxCounter, stepSize,
# use.table, n.table.options.
#
# Returns a list with Cond, Stim, Time and Resp vectors (one entry per
# simulated trial).
rdata=function(n,x,par.names) {
  names(x)=par.names
  out=list(Cond=NULL,Resp=NULL,Time=NULL)

  for (cond in conds) {
    for (stim in stims) {
      n.trials=as.double(n[cond,stim])

      # Append the fixed within-trial noise parameter to the free parameters.
      sim.params=c(x,0.1)
      names(sim.params)=c(par.names,"stoch.s")

      # Congruency coding: +1 compatible, -1 incompatible.
      if (cond=="comp") {
        congruency=1
      } else if (cond=="incomp") {
        congruency=-1
      }

      sim=simulate.DMC(N=n.trials,params=sim.params,maxCounter=use.maxCounter,stepSize=stepSize,congruency=congruency,use.table=use.table,n.table.options=n.table.options)

      out$Resp=c(out$Resp,sim$resp)
      out$Time=c(out$Time,sim$rt)
      out$Cond=c(out$Cond,rep(cond,n.trials))
      out$Stim=c(out$Stim,rep(stim,n.trials))
    }
  }
  out
}

# Approximate log-likelihood of one subject's data under parameter vector x,
# via probability density approximation (PDA): simulate Nsamples trials per
# cell, kernel-smooth the simulated RTs, and score the observed RTs against
# that density, plus a binomial term for the error-response counts.
#
# x:         unnamed parameter vector; named here via par.names
# data:      list with Cond, Stim, Time, Resp vectors
# par.names: parameter names
#
# Relies on globals: conds, stims, Nsamples, use.maxCounter, stepSize,
# Log.likelihood.fun and simulate.DMC (sourced from ../Code).
log.dens.like=function(x,data,par.names){
  names(x)=par.names
  out=0
  
  for (cond in conds) {
    for (stim in stims) {
      
      # Observations belonging to this condition/stimulus cell.
      tmp=data$Cond==cond & data$Stim==stim
      
      # Silverman-style KDE bandwidth.  NOTE(review): it scales with Nsamples
      # (the PDA synthetic-sample size), not the observed cell size, and the
      # local S here shadows the global subject count S -- harmless but easy
      # to misread.
      I=IQR(data$Time[tmp])
      S=sd(data$Time[tmp])
      bandwidth=0.9*min(I,S)*(Nsamples)^(-0.2)
      
      # Append the fixed within-trial noise parameter to the free parameters.
      params=x
      params.names=c(names(params),"stoch.s")
      params=c(params,0.1)
      
      # Congruency coding: +1 compatible, -1 incompatible.
      if (cond=="comp") {
        congruency=1
      } else if (cond=="incomp") {
        congruency=-1
      }
      
      names(params)=params.names
      
      tmp1=Log.likelihood.fun(all.data=list(rt=data$Time[tmp],resp=data$Resp[tmp]),params=params,Nsample=Nsamples,maxCounter=use.maxCounter,stepSize=stepSize,congruency=congruency,bandwidth=bandwidth,getSimData=simulate.DMC)
      
      # tmp1[[1]] appears to hold per-response lists of density estimates
      # (one or two responses); flatten to one vector of per-trial densities.
      # NOTE(review): structure inferred from usage -- confirm against PDA.R.
      if (length(tmp1[[1]]) == 1) {
        tmp2=tmp1[[1]][[1]]
      } else {
        tmp2=c(tmp1[[1]][[1]],tmp1[[1]][[2]])
      }
      
      # Floor densities at 1e-10 so the log stays finite.
      out=out+sum(log(pmax(tmp2,1e-10)))
      
      # Binomial term for the observed count of -1 responses; tmp1[[2]]/Nsamples
      # is presumably the simulated probability of a -1 response -- confirm.
      # The same 1e-10 floor bounds this term as well.
      out=out+pmax(dbinom(sum(data$Resp[tmp]==-1),length(data$Resp[tmp]),tmp1[[2]]/Nsamples,log=TRUE),log(1e-10))
    }
  }
  out
}

# Log prior density of one chain's parameter vector under its group-level
# truncated-normal distributions.
#
# x:     named parameter vector
# hyper: named vector holding "<par>.mu" and "<par>.sigma" entries
#
# Uses globals lower.bounds/upper.bounds; dtnorm comes from the msm package.
log.dens.prior=function(x,hyper){
  total=0
  for (p in names(x)) {
    mu=hyper[paste(p,"mu",sep=".")]
    sigma=hyper[paste(p,"sigma",sep=".")]
    total=total+dtnorm(x[p],mu,sigma,lower.bounds[p],upper.bounds[p],log=TRUE)
  }
  total
}

# Log density of the hyper level for one model parameter p:
# subject-level values theta under TN(phi[1], phi[2]), plus the priors on the
# group mean (truncated normal) and group sd (gamma).
#
# theta: vector of subject-level values of p (one per subject)
# phi:   length-2 vector c(mu, sigma) for parameter p
# prior: list(mu=c(mean,sd), sigma=c(shape,rate)) for this parameter
# p:     parameter name, used to look up lower.bounds/upper.bounds
log.dens.hyper=function(theta,phi,prior,p){
  theta.term=sum(dtnorm(theta,phi[1],phi[2],lower.bounds[p],upper.bounds[p],log=TRUE))
  mu.term=dtnorm(phi[1],prior$mu[1],prior$mu[2],lower.bounds[p],upper.bounds[p],log=TRUE)
  sigma.term=dgamma(phi[2],prior$sigma[1],prior$sigma[2],log=TRUE)
  theta.term + mu.term + sigma.term
}

# One differential-evolution MCMC update of chain i's subject-level parameters.
#
# i:         chain index
# pars:      column indices of the parameters to update (here 1:n.pars)
# use.theta: chains x parameters matrix of current values
# use.like:  current log-likelihoods per chain
# hyper:     chains x hyper-parameters matrix (group level)
# currIT:    current iteration, used for periodic likelihood refresh
#
# Uses globals n.chains and b.  Returns c(log-likelihood, parameter vector).
crossover=function(i,pars,use.theta,use.like,data,hyper,par.names,currIT){
  # Re-evaluate the stored likelihood every 5th iteration: the PDA likelihood
  # is a noisy Monte-Carlo estimate, so periodic refreshes presumably keep
  # chains from sticking on a lucky estimate -- confirm against PDA.R.
  if (currIT %% 5 == 0) use.like[i] = log.dens.like(use.theta[i,],data,par.names=par.names)
  use.weight=use.like[i] + log.dens.prior(use.theta[i,],hyper[i,])
  # Standard DE-MCMC scale factor 2.38/sqrt(2d).
  gamma = 2.38/sqrt(2*length(pars))
  # Two distinct other chains supply the difference vector.
  index=sample(c(1:n.chains)[-i],2,replace=F)
  theta=use.theta[i,]						
  # Proposal: current + scaled chain difference + one shared uniform jitter
  # (the same scalar runif draw is added to every parameter).
  theta[pars]=use.theta[i,pars] + gamma*(use.theta[index[1],pars]-use.theta[index[2],pars]) + runif(1,-b,b)
  prior.like=log.dens.prior(theta,hyper[i,])
  # Skip the expensive likelihood when the prior already rejects the proposal.
  if (prior.like > -Inf) {
    like=log.dens.like(theta,data,par.names=par.names)
  } else {
    like = -Inf
  }
  weight=like + prior.like
  if(!is.finite(weight))weight=-Inf
  # Metropolis accept/reject.
  if(runif(1) < exp(weight-use.weight)) {							
    use.theta[i,]=theta
    use.like[i]=like
  }
  c(use.like[i],use.theta[i,])
}

# One DE-MCMC update of chain i's group-level (mu, sigma) pair for a single
# model parameter p.
#
# i:         chain index
# pars:      the two phi columns for p (its .mu and .sigma entries)
# use.theta: chains x subjects matrix of current subject-level values of p
# use.phi:   chains x n.hpars matrix of current hyper values
#
# Uses globals n.chains and b.  Returns chain i's full (possibly updated)
# hyper row.
crossover_hyper=function(i,pars,use.theta,use.phi,prior,p){
  use.weight=log.dens.hyper(use.theta[i,],use.phi[i,pars],prior,p)
  # Standard DE-MCMC scale factor 2.38/sqrt(2d), d = 2 here.
  gamma = 2.38/sqrt(2*length(pars))
  index=sample(c(1:n.chains)[-i],2,replace=F)
  phi=use.phi[i,]
  # Proposal: current + scaled chain difference + shared uniform jitter.
  phi[pars]=use.phi[i,pars] + gamma*(use.phi[index[1],pars]-use.phi[index[2],pars]) + runif(1,-b,b)
  weight=log.dens.hyper(use.theta[i,],phi[pars],prior,p)
  if(!is.finite(weight))weight=-Inf
  # Metropolis accept/reject.
  if(runif(1) < exp(weight-use.weight)) use.phi[i,]=phi
  use.phi[i,]
}


# Migration step for the subject level: a random subset of chains forms a
# ring, and each chain may adopt the state of its predecessor in the ring
# (Metropolis-accepted under its own hyper parameters).  Helps merge
# outlier chains during the migration window of the main loop.
#
# Returns cbind(log-likelihoods, parameter matrix) for all chains.
migration.crossover=function(pars,use.theta,use.like,data,hyper,par.names){
  # Random ring size between 1 and n.chains, then the chains in it.
  n.migration.chains=ceiling(runif(1,0,n.chains))
  use.chains=sample(1:n.chains,n.migration.chains)
  migration.use.weight=rep(NA,n.migration.chains)
  migration.weight=rep(NA,n.migration.chains)
  for (mi in 1:n.migration.chains) {
    # Current posterior weight of chain mi under its own hyper row.
    migration.use.weight[mi]=use.like[use.chains[mi]] + log.dens.prior(use.theta[use.chains[mi],pars],hyper[use.chains[mi],])
    # Predecessor in the ring (chain 1's predecessor wraps to the last).
    newChain = mi - 1
    if (newChain == 0) newChain = n.migration.chains
    # Weight of the predecessor's state evaluated under chain mi's hyper row.
    migration.weight[mi]=use.like[use.chains[newChain]] + log.dens.prior(use.theta[use.chains[newChain],pars],hyper[use.chains[mi],])
    if(runif(1) < exp(migration.weight[mi]-migration.use.weight[mi])) {      				
      use.theta[use.chains[mi],]=use.theta[use.chains[newChain],]
      use.like[use.chains[mi]]=use.like[use.chains[newChain]]
    }
  }
  cbind(use.like,use.theta)
}


# Migration step for the group level: same ring scheme as migration.crossover,
# but chains may adopt the predecessor's (mu, sigma) pair for one model
# parameter p, scored against their own subject-level values.
#
# use.theta: chains x subjects matrix of subject-level values of p
# use.phi:   chains x n.hpars matrix of current hyper values
#
# Returns the (possibly updated) full phi matrix.
migration.crossover_hyper=function(pars,use.theta,use.phi,prior,p){
  # Random ring size between 1 and n.chains, then the chains in it.
  n.migration.chains=ceiling(runif(1,0,n.chains))
  use.chains=sample(1:n.chains,n.migration.chains)
  migration.use.weight=rep(NA,n.migration.chains)
  migration.weight=rep(NA,n.migration.chains)
  for (mi in 1:n.migration.chains) {
    # Current hyper weight of chain mi.
    migration.use.weight[mi]=log.dens.hyper(use.theta[use.chains[mi],],use.phi[use.chains[mi],pars],prior,p=p)
    # Predecessor in the ring (wraps around).
    newChain = mi - 1
    if (newChain == 0) newChain = n.migration.chains
    # Predecessor's hyper pair scored against chain mi's own theta values.
    migration.weight[mi]=log.dens.hyper(use.theta[use.chains[mi],],use.phi[use.chains[newChain],pars],prior,p=p)
    if(runif(1) < exp(migration.weight[mi]-migration.use.weight[mi])) {        			
      use.phi[use.chains[mi],pars]=use.phi[use.chains[newChain],pars]
    }
  }
  use.phi
}



library(msm)
library(foreach)
library(doParallel)
source("../Code/simulate-DMC_v2.R")
source("../Code/PDA.R")
dyn.load("../Code/DMC_v2.so")


# ---- Sampler configuration ----

# NOTE(review): the RNG was already seeded identically at the top of the
# script; this second wall-clock seed is redundant but harmless.
newSeed=Sys.time()
set.seed(as.numeric(newSeed))

# Time step of the simulated diffusion process (passed to simulate.DMC).
stepSize=0.01

# Cap on simulation steps per trial (passed to simulate.DMC as maxCounter).
use.maxCounter=150

# Start points and bounds for the 9 model parameters, in theta.names order:
# v, a, ter, betaDist, gamma, alpha, tau, terSD, terTrunc.
start.points=c(.3,.2,.3,5,30,3,100,0.1,0.1)
lower.bounds=c(-Inf,0,0,1,0,1,0,0,0)
upper.bounds=c(Inf,Inf,Inf,Inf,Inf,Inf,Inf,Inf,Inf)

# MCMC dimensions: chains, parameters, hyper-parameters (a mu and sigma per
# parameter), iterations, and the PDA synthetic-sample size per likelihood.
n.chains=27
n.pars=9
n.hpars=18
nmc=3000
Nsamples=10000

# Migration steps run every migration.freq-th iteration, but only inside the
# (migration.start, migration.end) iteration window.
migration.start=1000
migration.end=1750
migration.freq=14

# Half-width of the uniform jitter added to every DE-MCMC proposal.
b=.001

# theta:  chains x parameters x subjects x iterations (S comes from the
#         loaded .Rdata -- presumably the number of subjects; confirm).
# phi:    chains x hyper-parameters x iterations.
# weight: iterations x chains x subjects log-posterior weights; -Inf marks
#         "not yet evaluated" and drives the initialization loop below.
theta=array(NA,c(n.chains,n.pars,S,nmc))
phi=array(NA,c(n.chains,n.hpars,nmc))
weight=array(-Inf,c(nmc,n.chains,S))

# Dimension names; colnames() on an array sets dimnames()[[2]].
theta.names = c("v", "a", "ter", "betaDist", "gamma", "alpha","tau","terSD","terTrunc")
colnames(theta) = theta.names
phi.names=paste(rep(theta.names,each=2),c("mu","sigma"),sep=".")
colnames(phi) <- phi.names

names(lower.bounds) = names(upper.bounds) = theta.names

# Worker pool for the per-subject foreach loops below.
registerDoParallel(cores=8)

# ---- Chain initialization ----
# Subject level: redraw start values around start.points until every chain
# gets a finite PDA likelihood.  Group level: draw each (mu, sigma) pair from
# a truncated normal centred on the parameter's start value.
#
# BUG FIX: the original located phi columns with substring grep() calls, e.g.
# grep("a",phi.names), which also matched every "*.sigma" column (all contain
# the letter "a").  Because the "v" step ran before the "a" step, v.sigma was
# silently re-initialized with a's settings (mean .2) instead of v's (.3).
# Exact match() lookups below fix that and make the result independent of
# assignment order.  Group-level sigmas are always truncated at 0: they feed
# dtnorm() as an sd and carry a Gamma hyper-prior (see log.dens.hyper), so a
# negative draw would poison the sampler.
phi.init.mean=c(v=.3,a=.2,ter=.3,betaDist=5,gamma=30,alpha=3,tau=100,terSD=.2,terTrunc=.2)
phi.init.lower=c(v=-Inf,a=0,ter=0,betaDist=1,gamma=0,alpha=1,tau=0,terSD=0,terTrunc=0)

for(i in 1:n.chains){
  # Draw valid subject-level start values, in parallel over subjects.
  temp=foreach(j=1:S) %dopar% {
    current.weight=weight[1,i,j]
    while (current.weight==-Inf) {
      current.thetas=rtnorm(n=n.pars,mean=start.points,sd=start.points/5,lower.bounds,upper.bounds)
      current.weight=log.dens.like(current.thetas,data=data[[j]],par.names=theta.names)
    }
    c(current.weight,current.thetas)
  }
  for (j in 1:S) {
    theta[i,,j,1]=temp[[j]][2:(n.pars+1)]
    weight[1,i,j]=temp[[j]][1]
  }

  # Group-level start values: for each model parameter, draw its mu (bounded
  # by the parameter's own lower bound) and its sigma (bounded at 0), both
  # from a truncated normal with sd equal to the mean.
  for (p in names(phi.init.mean)) {
    mu.col=match(paste(p,"mu",sep="."),phi.names)
    sigma.col=match(paste(p,"sigma",sep="."),phi.names)
    phi[i,mu.col,1]=rtnorm(n=1,mean=phi.init.mean[[p]],sd=phi.init.mean[[p]],lower=phi.init.lower[[p]],upper=Inf)
    phi[i,sigma.col,1]=rtnorm(n=1,mean=phi.init.mean[[p]],sd=phi.init.mean[[p]],lower=0,upper=Inf)
  }
}


# ---- Group-level hyper-priors ----
# For each model parameter p (see log.dens.hyper):
#   mu_p    ~ TruncNormal(prior[[p]]$mu[1],  prior[[p]]$mu[2])
#   sigma_p ~ Gamma(prior[[p]]$sigma[1], prior[[p]]$sigma[2])
#
# The original filled this list through substring grep() matches over
# theta.names (e.g. grep("a") also matched betaDist, gamma, alpha and tau) and
# only produced the right result because later, more specific assignments
# overwrote earlier broad ones.  Building it directly from a named vector
# yields the identical final list without depending on assignment order.
prior.mean=c(v=.3,a=.2,ter=.3,betaDist=5,gamma=30,alpha=3,tau=100,terSD=.2,terTrunc=.2)
prior=list()
for (p in names(prior.mean)) {
  prior[[p]]=list(mu=c(prior.mean[[p]],prior.mean[[p]]),sigma=c(1,1))
}


# Checkpoint/output file for save.image() calls during and after sampling.
savefile=paste0("Hier_Fits/DMC_v2-BayesHierFit-",nmc,"-servantFlanker_complex.Rdata")

# ---- Main sampling loop ----
begin = date()
for(i in 2:nmc){
  cat("\n ",i,"  ")
  # Checkpoint the whole workspace every 100 iterations.
  if (i %% 100 == 0) save.image(savefile)
  # Carry the previous hyper state forward; the group-level updates below
  # modify it in place, one model parameter at a time.
  phi[,,i]=phi[,,i-1]
  # Random chain permutation so subject-level and group-level chains are not
  # persistently paired.
  rand.samp=sample(1:n.chains,n.chains)
  for (p in theta.names) {
    # match() is exact (unlike grep), so e.g. "ter" cannot pick up "terSD".
    which.theta=match(x=p,table=theta.names)
    which.phi=match(x=paste(p,c("mu","sigma"),sep="."),table=phi.names)
    # Inside the migration window, every migration.freq-th iteration swaps
    # states between chains instead of proposing DE crossover moves.
    if (i %% migration.freq == 0 & i > migration.start & i < migration.end) {
      phi[,,i]=migration.crossover_hyper(pars=which.phi,use.theta=theta[rand.samp,which.theta,,i-1],use.phi=phi[,,i],prior=prior[[p]],p=p)
    } else {
      phi[,,i]=t(sapply(1:n.chains,crossover_hyper,pars=which.phi,use.theta=theta[rand.samp,which.theta,,i-1],use.phi=phi[,,i],prior=prior[[p]],p=p))
    }
  }
  # Fresh permutation for the subject-level updates.
  rand.samp=sample(1:n.chains,n.chains)
  hyper=phi[rand.samp,,i]
  
  
  # Subject-level updates, parallelized over subjects.
  if (i %% migration.freq == 0 & i > migration.start & i < migration.end) {
    temp=foreach(j=1:S) %dopar% migration.crossover(pars=1:n.pars,use.theta=theta[,,j,i-1],use.like=weight[i-1,,j],data=data[[j]],hyper=hyper,par.names=theta.names)
  } else {
    temp=foreach(j=1:S) %dopar% t(sapply(1:n.chains,crossover,pars=1:n.pars,use.theta=theta[,,j,i-1],use.like=weight[i-1,,j],data=data[[j]],hyper=hyper,par.names=theta.names,currIT=i))
  }
  
  # Unpack per-subject results: column 1 is the log-likelihood, the rest the
  # parameter values (matches the return layout of crossover).
  for(j in 1:S){
    weight[i,,j]=temp[[j]][,1]
    theta[,,j,i]=temp[[j]][,2:(n.pars+1)]
  }
}
end = date()
# Wall-clock start/end timestamps (printed when run interactively).
begin
end


# ---- Posterior-predictive settings ----
# Discard the first `burnin` iterations; from burnin..nmc take n.posteriors
# evenly spaced draws and simulate n.per times the observed trial counts from
# each draw (see the loop below).
burnin=2000
n.per=20
n.posteriors=50

# Persist the finished chains before the (possibly slow) simulation pass.
save.image(savefile)

# Posterior-predictive simulation: for every subject, pick a random chain at
# each retained iteration and simulate n.per times the observed number of
# trials per condition/stimulus cell from those parameters.
synth.data=list()
for (s in 1:S) {
  cat(s)
  cell.counts=table(data[[s]]$Cond,data[[s]]$Stim)
  acc=list(Stim=NULL,Cond=NULL,Time=NULL,Resp=NULL)
  keep.iters=round(seq(from=burnin,to=nmc,length.out=n.posteriors))
  for (it in keep.iters) {
    # One randomly chosen chain per retained iteration.
    pars=theta[sample(n.chains,1),,s,it]
    sim=rdata(n=cell.counts*n.per,x=pars,par.names=theta.names)
    for (field in names(acc)) acc[[field]]=c(acc[[field]],sim[[field]])
  }
  synth.data[[s]]=data.frame(acc)
}
names(synth.data)=names(data)

# Final save including the posterior-predictive data.
save.image(savefile)

