# Establish directories ----------------------------------------------------

in.dir  <- '/home1/99/jc152199/brt/data/'
out.dir <- '/home1/99/jc152199/brt/data/'
setwd(in.dir)

# Load the gbm library (and friends) to perform Boosted Regression Tree
# analysis ----------------------------------------------------------------

necessary <- c('gbm', 'SDMTools', 'random', 'sp', 'rgdal')

# installed.packages() returns a matrix; compare against its rownames
# (the package names) rather than every cell of the matrix, which could
# match a package name appearing in another package's Depends/Imports.
installed <- necessary %in% rownames(installed.packages())

# Install any missing packages (with their dependencies), then load all
if (any(!installed)) {
  install.packages(necessary[!installed], dependencies = TRUE)
}
for (lib in necessary) {
  library(lib, character.only = TRUE)
}

# Read in the validation-error summary to plot
valid.error.summary <- read.csv('valid.error.summary.csv', header = TRUE)

# tree.complexity was not stored when this summary was written, so
# reconstruct it here: row 1 is an all-NA placeholder, then 20 rows run at
# tree complexity 1 followed by 20 rows at tree complexity 3.
# TODO: store tree.complexity in the summary at write time so these
# hard-coded counts can be removed.
valid.error.summary$tree.complexity <- c(NA, rep(1, 20), rep(3, 20))

# Drop the leading all-NA placeholder row
valid.error.summary <- valid.error.summary[-1, ]

# Unique parameter levels to iterate over when plotting
tc.list <- unique(valid.error.summary$tree.complexity)
lr.list <- unique(valid.error.summary$learning.rate)
tf.list <- unique(valid.error.summary$train.fraction)

# Run loop to plot data ----------------------------------------------------
# Produces one plot per level of tree complexity, plotting learning rate
# against minimum validation error, with one line per training fraction.

for (tc in tc.list) {

  single.tc <- subset(valid.error.summary, tree.complexity == tc)

  # Limits for the x and y axes of the plot space
  ylims    <- c(0, max(single.tc$min.valid.error))
  lr.xlims <- c(0, max(single.tc$learning.rate))

  png(paste0('valid.error.comparison.tc-', tc, '.png'))

  # Empty plot region; lines are added per training fraction below
  plot(single.tc$learning.rate, single.tc$min.valid.error,
       xlab = 'Learning Rate', ylab = 'Min Validation Error',
       main = paste0('Validation Error Comparison Tree Complexity - ', tc),
       type = 'n', xlim = lr.xlims, ylim = ylims)

  # One colour per training fraction. Computed once with a fixed length so
  # cols[i] is always defined (previously rainbow(nrow(single.tf)) was
  # rebuilt each iteration and could be shorter than the loop counter).
  cols <- rainbow(length(tf.list))

  i <- 1                # colour index for the current line
  j <- max(ylims) - .1  # y position of the current legend entry

  for (tf in tf.list) {

    single.tf <- subset(single.tc, train.fraction == tf)

    legend(x = .15, y = j,
           legend = paste0('Training Fraction - ', single.tf$train.fraction[1]),
           text.col = cols[i], bty = "o", bg = 'grey')

    lines(single.tf$learning.rate, single.tf$min.valid.error,
          col = cols[i], type = 'b', lwd = 1.5, cex = 1.5, pch = 16)

    i <- i + 1                               # next colour
    j <- j - (max(ylims) - min(ylims)) / 12  # step legend entry downward
  }

  dev.off()
}
	
# End

# Run loop to plot data ----------------------------------------------------
# Produces one plot per level of learning rate, plotting tree complexity
# against minimum validation error, with one line per training fraction.

for (lr in lr.list) {

  single.lr <- subset(valid.error.summary, learning.rate == lr)

  # Limits for the x and y axes of the plot space
  ylims    <- c(0, max(single.lr$min.valid.error))
  tc.xlims <- c(0, max(single.lr$tree.complexity))

  png(paste0('valid.error.comparison.lr-', lr, '.png'))

  # Empty plot region; lines are added per training fraction below.
  # (x-axis label typo 'Complextiy' corrected.)
  plot(single.lr$tree.complexity, single.lr$min.valid.error,
       xlab = 'Tree Complexity', ylab = 'Min Validation Error',
       main = paste0('Validation Error Comparison Learning Rate - ', lr),
       type = 'n', xlim = tc.xlims, ylim = ylims)

  # One colour per training fraction, matching the tree-complexity plots
  # (previously rainbow(nrow(single.lr)) spread the palette over every row
  # of the subset, so line colours disagreed with the other plot set).
  cols <- rainbow(length(tf.list))

  i <- 1                # colour index for the current line
  j <- min(ylims) + .1  # y position of the current legend entry

  for (tf in tf.list) {

    single.tf <- subset(single.lr, train.fraction == tf)

    legend(x = 0, y = j,
           legend = paste0('Training Fraction - ', single.tf$train.fraction[1]),
           text.col = cols[i], bty = "o", bg = 'grey')

    lines(single.tf$tree.complexity, single.tf$min.valid.error,
          col = cols[i], type = 'b', lwd = 1.5, cex = 1.5, pch = 16)

    i <- i + 1               # next colour
    j <- j + max(ylims) / 12 # step legend entry upward
  }

  dev.off()
}
	
# End
