# Load modeling packages.
# Bug fix: CRAN package names are case-sensitive — the original
# `library(liblinear)` / `library(randomforest)` would fail to load;
# the script below calls LiblineaR() and randomForest().
library(verification)  # roc.area() for AUC evaluation
library(LiblineaR)     # regularized logistic regression
library(randomForest)  # random forest classifier
library(dummies)       # dummy-variable encoding of categorical features

# Fraction of the training file used for model fitting; the remainder
# is held out for validation.
trainPerc <- 0.9

# Load the raw data. NOTE(review): assumes the script runs from the
# project root so the relative "data/" paths resolve — confirm.
trainData <- read.csv("data/train.csv")
testData <- read.csv("data/test.csv")

# Separate predictors (x) from the binary response (y).
# Bug fixes vs. the original:
#   * `trainData[-ACTION]` referenced an undefined object ACTION;
#     drop the response column by name instead.
#   * `y <-` had an empty right-hand side; it should be the ACTION column.
x <- trainData[, setdiff(names(trainData), "ACTION"), drop = FALSE]
y <- trainData$ACTION

# Random train/validation split of the row indices.
# NOTE(review): consider set.seed() here for a reproducible split.
train <- sample(seq_len(nrow(trainData)), trainPerc * nrow(trainData))

# Bug fixes: the original indexed the not-yet-defined `xTrain`
# (`xTrain=xTrain[train,]`), and built the validation set from the
# training subset (`xTest=xTrain[-train,]`) instead of the full data.
xTrain <- x[train, , drop = FALSE]
xTest <- x[-train, , drop = FALSE]
yTrain <- y[train]
yTest <- y[-train]

# Center and scale the training predictors.
# NOTE(review): the scaling parameters (attr(s, "scaled:center") /
# attr(s, "scaled:scale")) must be reused on xTest before predicting
# with `m`; scaling the validation data independently would leak.
s <- scale(xTrain, center = TRUE, scale = TRUE)

# Logistic regression: LiblineaR solver type 0
# (L2-regularized logistic regression, primal).
# Renamed from `t`/`try` — both shadowed base R functions.
solverType <- 0

# Tune the cost parameter C via 10-fold cross-validation.
costGrid <- c(1000, 100, 10, 1, 0.1, 0.01, 0.001)
res <- numeric(length(costGrid))  # preallocate instead of growing with c()
for (i in seq_along(costGrid)) {
  co <- costGrid[i]
  # With cross > 0, LiblineaR returns the cross-validation accuracy
  # rather than a fitted model.
  acc <- LiblineaR(data = s, labels = yTrain, type = solverType,
                   cost = co, bias = TRUE, cross = 10, verbose = FALSE)
  res[i] <- acc
  cat("Results for C=", co, " : ", acc, " accuracy.\n", sep = "")
}

# Re-train a single model on the full training split with the best C.
best <- which.max(res)
co <- costGrid[best]
m <- LiblineaR(data = s, labels = yTrain, type = solverType,
               cost = co, bias = TRUE, verbose = FALSE)

# Alternative model kept for reference (disabled).
# NOTE(review): forest$predicted is the predicted class, not a
# probability — roc.area() expects probabilities; use
# predict(forest, type = "prob") if re-enabling.
#forest <- randomForest(ACTION~.,data=trainData)
#roc.area(trainData$ACTION,forest$predicted)