# NOTE(review): wiping the global environment at the top of a script is
# discouraged (it is not a fresh session and deletes any objects a caller
# had); prefer restarting R.  Kept for compatibility with the original
# workflow, but with `all.names` spelled out (the original used partial
# matching via `all=`) and TRUE instead of the reassignable binding T.
rm(list = ls(all.names = TRUE))
library(reshape)  # NOTE(review): no reshape functions are used below — possibly a leftover dependency
library(ggplot2)

# Read the condensed holdout-1 test-correlation table: whitespace-separated,
# no header row, first column used as row names.
dat1 <- read.csv('/Users/lars/algo/condensed/H1.condensed.testcorr',
                 sep = '', header = FALSE, row.names = 1)
colnames(dat1) <- c('encoding', 'nd', 'neurons', 'correlation')

# BUG FIX: since R 4.0 read.csv() no longer converts strings to factors, so
# dat1$encoding arrived as character and every levels()<- relabelling below
# was a silent no-op.  Coerce explicitly (idempotent on older R where the
# column was already a factor).
dat1$encoding <- as.factor(dat1$encoding)
dat1$nd <- as.factor(dat1$nd)

# Human-readable labels for the two encoding schemes.
levels(dat1$encoding)[levels(dat1$encoding) == "bl"] <- 'BLOSUM'
levels(dat1$encoding)[levels(dat1$encoding) == "sp"] <- 'Sparse'

# Relabel pseudo-dataset codes with their actual dataset sizes
# (presumably number of data points — TODO confirm with the pipeline docs).
levels(dat1$nd)[levels(dat1$nd) == "500"] <- '12000'
levels(dat1$nd)[levels(dat1$nd) == "5000"] <- '16500'
levels(dat1$nd)[levels(dat1$nd) == "50000"] <- '61500'

# Scatter plot of test-set correlation against hidden-layer size, one facet
# per encoding scheme, points coloured by pseudo-dataset size.
p <- ggplot(dat1, aes(x = neurons, y = correlation, color = nd)) +
  geom_point() +
  facet_grid(encoding ~ .) +
  labs(x = 'Neurons', y = 'Test set correlation') +
  scale_color_discrete(name = 'Pseudo-\nDataset') +
  theme(legend.position = 'top')
print(p)  # display, as the bare top-level expression did originally
ggsave('/Users/lars/algo/aib-nn/report/Graphics/condensed_testset.png',
       plot = p, width = 15, height = 15, units = 'cm')

# Additive linear model: does test correlation depend on hidden-layer size,
# pseudo-dataset size and encoding?  anova() prints the sequential
# (type I) ANOVA table at top level.
mod <- lm(correlation ~ neurons + nd + encoding, data = dat1)
anova(mod)

# Best test-set correlation observed.
# BUG FIX: the original read max(dat[,'correlation']) — but `dat` is not
# defined until later in the script (and there holds synapse weights with
# no 'correlation' column); the correlation table is `dat1`.
max(dat1[, 'correlation'])

# Number of hidden neurons in the network whose synapse file is read below
# (must match the `nh3` file actually loaded).
neurons <- 3

# Read the raw synapse weights, skipping the 5-line header.
dat <- scan('/Users/lars/algo/condensed/holdout1/synapses/syn_sp_50000_nh3.dat',
            skip = 5)
# Drop every 181st value — each neuron appears to contribute 180 weights
# plus one extra term (presumably a bias — TODO confirm the file format).
dat <- dat[c(rep(TRUE, 180), FALSE)]
# Keep only the input->hidden weights for the `neurons` hidden units.
dat <- dat[1:(180 * neurons)]

# Long format: one row per weight, annotated with sequence position (1-9),
# owning neuron, and amino acid (1-20); 9 positions x 20 AAs = 180 weights
# per neuron.  The original built this via cbind() with a placeholder NA
# column that was immediately overwritten, and relied on silent recycling
# of a length-180 AA vector into a 180*neurons column — both made explicit.
dat <- data.frame(
  value    = dat,
  position = as.factor(rep(rep(1:9, each = 20), times = neurons)),
  neuron   = as.factor(rep(1:neurons, each = 180)),
  AA       = as.factor(rep(rep(1:20, times = 9), times = neurons))
)

# One weight-density plot per hidden neuron, one curve per sequence position.
# (A stray debug assignment `j = 3` preceded this loop in the original; it
# was immediately overwritten by the loop variable and has been removed.)
for (j in seq_len(neurons)) {
  # Subset to this neuron's weights; `neuron` is a factor, and comparing a
  # factor against a number compares against its (numeric-string) levels.
  tmp <- dat[dat[, 'neuron'] == j, ]
  m <- ggplot(tmp, aes(x = value, colour = position)) +
    geom_density() +
    xlab('Weight') +
    ylab('Density') +
    scale_color_discrete(name = "Position") +
    annotate('text', x = Inf, y = Inf, label = 'Holdout 1',
             hjust = 1.5, vjust = 1.5)
  print(m)
  # First placeholder is the (constant) neuron count, second the neuron
  # index, giving condensed_neuron3_1.png, condensed_neuron3_2.png, ...
  # NOTE(review): confirm this naming is intended and not meant to be j only.
  ggsave(sprintf('/Users/lars/algo/aib-nn/report/Graphics/condensed_neuron%s_%s.png',
                 neurons, j),
         width = 15, height = 15, units = 'cm')
}
