# scratch.R
# 
# Exploratory analysis of MIDI key events (klavier.history): pairs each
# key-DOWN event with its following UP event to derive press durations.
#
# Author: rafaelalgara
###############################################################################


#***************************************************************
#***************************************************************
#***************************************************************

source('dataLib.R')

getDbSet <- function (useCache=FALSE) {
	# Fetch recent key events from klavier.history and pair each DOWN event
	# with the immediately following event (its matching UP) to compute a
	# per-keypress duration.
	#
	# useCache: when TRUE, skip the database fetch entirely; the function
	#           then returns NULL (invisible), leaving any previously
	#           fetched global `dbset` untouched.
	# Returns:  a data frame with columns event_id, event_id.1, tstmp,
	#           tstmp.1, note, pressure, duration.
	# Side effect: assigns the raw query result to the global `dbset`
	#           via `<<-`; the exploratory code below relies on this.
	if (!useCache) {
		sql <- 
				"SELECT event_id,source_tstmp,millis,note,if(status=144,'DOWN','UP') as status,pressure
				FROM klavier.history h
				order by event_id desc limit 1000"
		#limit 1000
		startTime <- Sys.time()
		# message() actually emits these; a bare sprintf() inside a function
		# discards its result, so the original progress lines never printed.
		message(sprintf("Starting record fetch at %s", as.character(startTime)))
		dbset <<- fetchResult(sql)
		endTime <- Sys.time()
		message(sprintf("Ended record fetch at %s", as.character(endTime)))
		# NOTE(review): endTime - startTime is a difftime whose units are
		# auto-chosen (secs/mins/...), so "seconds" here is approximate.
		message(sprintf("Read %i rows in %s seconds", nrow(dbset),
				as.character(endTime - startTime)))
		# Parse source_tstmp (sub-second precision via %OS) into POSIXct.
		e0 <- cbind(dbset, tstmp=strptime(dbset$source_tstmp, "%Y-%m-%d %H:%M:%OS"))
		fields <- c('event_id','tstmp','millis','note','pressure','status')
		e1 <- e0[,fields]
		e2 <- e1[order(e1$note,e1$event_id),] # sort by note,event_id for contiguity
		# Sentinel row appended so the shift-by-one below has a partner for
		# the last event. (1960 origin looks intentional as an "obviously
		# fake" timestamp — TODO confirm.)
		NULL_ROW <- structure(
				list(event_id=0,tstmp=as.POSIXct(0,origin='1960-01-01'),millis=0,note=0,pressure=0,status='NULL')
				)
		# Pair each row with its successor: cbind the frame against itself
		# shifted up by one row to get a consolidated DOWN/UP record.
		e3 <- cbind(e2,rbind(e2[-1,],NULL_ROW))
		e4 <- e3[e2$status=='DOWN',] # keep only rows whose first event is DOWN
		fields <- c('event_id','event_id.1','tstmp','tstmp.1','note','pressure')
		e5 <- e4[,fields]
		# duration = partner timestamp minus DOWN timestamp, coerced to double.
		# NOTE(review): difftime subtraction picks its own units; confirm
		# whether callers expect seconds here.
		e6 <- cbind(e5, duration=as.double(e5[,'tstmp.1'] - e5[,'tstmp']))
		#--- CLEANSE (kept for reference; not yet applied)
		#dirty_index <- which(is.na(e4$note)) # Isolate nulls
		#dirty_index <- c(dirty_index,which(e4$note != e4$note.1)) # Isolate non-matching notes
		#dirty_index <- c(dirty_index,which(e4$status == e4$status.1)) # Isolate improperly matched events (UP/UP, DOWN/DOWN)
		#dirty <- e4[dirty_index,] 
		#e5 <- e4[-dirty_index,]
		
		#--- 
		#e6 <- e5[,setdiff(fields,'status')]
		#e7 <- cbind(e6,duration=(e5$millis.1 - e6$millis))
		# `return e` in the original was a parse error; return the result
		# as the value of the if-block.
		e6
	}
}


#dbset <- getDbSet()

#graphics.off()
#par(mfcol=c(3,2))
#PLOT.OUTLIERS=T
#hist(e$pressure,main="Pressure distribution",xlab="MIDI Pressure",col='lightblue')
#boxplot(e$pressure,col='lightblue',horizontal=T,outline=PLOT.OUTLIERS,main="",xlab="MIDI Pressure")
# Duration outliers skew distribution. Select only up to K times the mean
#K <- 4
#duration <- e$duration[
#					e$duration < K * mean(e$duration)
#]
#hist(duration,main="Duration distribution",xlab="Milliseconds",col='lightblue')
#boxplot(duration,col='lightblue',horizontal=T,outline=PLOT.OUTLIERS,main="",xlab="Milliseconds")

#e0<-e[order(e$tstmp),][1:100,]
#matplot(e0$tstmp,e0[,c('note','pressure')],type='s',ylim=c(0,128))
#matplot(e0$tstmp,e0[,c('duration')],type='s',ylim=c(0,3))

#loooooong <- e7[e7$duration>50000,][1:10,]

#---------

#sql <- "select date(event_tstmp) as date,count(*) as count from history group by date(event_tstmp)"
#dbset <<- fetchResult(sql)
#dates <- seq(as.Date(min(dbset$date)),as.Date(max(dbset$date)),by="day")
#dates <- data.frame(dates)
#merge(x=dbset,y=dates,by.x="date",by.y="dates") # returns EMPTY!!!














