# Establish input/output directories
# NOTE(review): hard-coded absolute paths; fine for a one-off cluster script

in.dir = '/home1/99/jc152199/brt/data/cv.error.summary.4000trees/'
setwd(in.dir)
out.dir = '/home1/99/jc152199/brt/data/cv.error.summary.4000trees/'

# Load the gbm library (and friends) to perform Boosted Regression Tree analysis
necessary = c('gbm', 'SDMTools', 'random', 'sp', 'rgdal')
# Check which packages are installed. Compare against the package-name column
# (rownames) only -- matching against the whole installed.packages() matrix
# can spuriously match version strings or library paths.
installed = necessary %in% rownames(installed.packages())
# Install any packages that are missing
if (any(!installed)) install.packages(necessary[!installed], dependencies = TRUE)
# Load the libraries (character.only because the names are strings)
for (lib in necessary) library(lib, character.only = TRUE)

# Read in data to plot
#cv.error.summary = read.csv('cv.error.summary.3000trees.csv',header=T)
cv.error.summary = read.csv('cv.error.summary.4000trees.csv', header = TRUE)

# Drop the first row, which is all NAs (an artefact of how the summary file
# was built). Negative indexing is safe even when the data frame has a single
# row, unlike 2:nrow(df), which would return rows c(2, 1) when nrow == 1.
cv.error.summary = cv.error.summary[-1, ]

		# Create fake data to plot

		#lr = c(.01,.05)
		#tc = c(1,3)
		#nt = 500
		#ot = c(100,200,300,500)

		#fake.error1 = c(seq(1,20,(19/500))[1:500])
		#fake.error2 = c(seq(1,10,(9/500))[1:500])
		#fake.error3 = c(seq(1,15,(14/500))[1:500])
		#fake.error4 = c(seq(1,5,(4/500))[1:500])

		#t.fake1 = data.frame(learning.rate=c(rep(lr[1],nt)),tree.complexity=c(rep(tc[1],nt)),iteration=c(seq(1,nt,1)),optimal.tree.num=c(rep(ot[1],nt)),cv.error=c(fake.error1))
		#t.fake2 = data.frame(learning.rate=c(rep(lr[1],nt)),tree.complexity=c(rep(tc[2],nt)),iteration=c(seq(1,nt,1)),optimal.tree.num=c(rep(ot[2],nt)),cv.error=c(fake.error2))
		#t.fake3 = data.frame(learning.rate=c(rep(lr[2],nt)),tree.complexity=c(rep(tc[1],nt)),iteration=c(seq(1,nt,1)),optimal.tree.num=c(rep(ot[3],nt)),cv.error=c(fake.error3))
		#t.fake4 = data.frame(learning.rate=c(rep(lr[2],nt)),tree.complexity=c(rep(tc[2],nt)),iteration=c(seq(1,nt,1)),optimal.tree.num=c(rep(ot[4],nt)),cv.error=c(fake.error4))

		#fake.summary = rbind(t.fake1,t.fake2,t.fake3,t.fake4)

# Set up a loop to plot all models with equal tc or equal lr on the same page.
# First collect the distinct learning rates and tree complexities present.

lr.list = unique(cv.error.summary$learning.rate)
tc.list = unique(cv.error.summary$tree.complexity)

# Colour counter: bumped once per (learning rate, tree complexity)
# combination inside the innermost loop

i = 0

# Legend colour indices: advanced by 3 (one per tree complexity) at the end
# of each outer iteration so they stay in sync with i

j = c(1, 2, 3)

# Shared axis limits so every page is drawn on a common scale

ylims = range(cv.error.summary$cv.error)
xlims = c(0, max(cv.error.summary$iteration))

# Seed data frame (a single all-NA row) that per-model optimal-tree rows are
# rbind-ed onto; the NA seed row is stripped again after the loop

opt.tree = data.frame(learning.rate = NA, tree.complexity = NA,
                      iteration = NA, optimal.tree.num = NA, cv.error = NA)

# Run loop to plot data and build the aggregate list of optimal trees.
# One PNG per learning rate; one coloured curve per tree complexity.

# One colour per (learning rate, tree complexity) combination; computed once
# instead of calling rainbow() on every lines()/points()/legend() call
col.palette = rainbow(length(lr.list) * length(tc.list))

for (lrs in lr.list)

	{
	
	single.lr = subset(cv.error.summary, learning.rate == lrs)
	
	png(paste('cv.error.comparison.lr', lrs, '.png', sep = ''))
	
	# Empty plot frame (type = 'n'); curves are layered on per tree complexity
	plot(single.lr$iteration, single.lr$cv.error,
		xlab = 'Iteration', ylab = 'Cross-Validation Error',
		main = paste('CV Error Comparison - Learning Rate - ', lrs, sep = ''),
		type = 'n', xlim = xlims, ylim = ylims)
	
	# One legend entry per tree complexity. Built programmatically so it works
	# for any length of tc.list (the original hard-coded exactly three entries).
	# Column 4 of cv.error.summary is optimal.tree.num; take the first matching row.
	legend.labels = sapply(tc.list, function(tcs)
		paste('Tree Complexity - ', tcs, ' Op Tree Num - ',
			cv.error.summary[which(cv.error.summary$tree.complexity == tcs &
				cv.error.summary$learning.rate == lrs)[1], 4], sep = ''))
	# Colours for this page are the next length(tc.list) palette slots after i
	legend("topright", legend = legend.labels,
		text.col = col.palette[i + seq_along(tc.list)], bty = "n")
	
	for (tcs in tc.list)
	
		{
		
		i = i + 1 # advance to this combination's colour
		
		single.tc = subset(single.lr, tree.complexity == tcs)
		# Row(s) where the iteration equals the reported optimal tree number
		opt.tree1 = subset(single.tc, iteration == optimal.tree.num)
		
		# Plot against the iteration column explicitly (robust if iterations
		# are not simply 1..n in row order)
		lines(single.tc$iteration, single.tc$cv.error, col = col.palette[i])
		points(opt.tree1$iteration, opt.tree1$cv.error,
			col = col.palette[i], cex = 1.5)
		opt.tree = rbind(opt.tree, opt.tree1)
		
		}
	
	# Kept for compatibility with the original colour bookkeeping; legend
	# colours above are now derived from i directly
	j = j + length(tc.list)
	
	dev.off()
	
	}
	
# Aggregate the dataset by tree complexity and learning rate to show the
# optimal number of trees for each combination

opt.tree.agg = aggregate(cv.error.summary$optimal.tree.num,
	by = list(cv.error.summary$tree.complexity, cv.error.summary$learning.rate),
	FUN = mean)

# Drop the all-NA seed row from the loop accumulator. Negative indexing is
# safe even if the loop appended nothing (2:nrow(opt.tree) would return rows
# c(2, 1) when only the seed row exists).
opt.tree.loop = opt.tree[-1, ]
	
