function [ trainError,testError ] = logisticRegression( xtrain, ytrain, xtest, ytest, alpha, epsilon )
%LOGISTICREGRESSION Train a binary logistic-regression classifier by batch
%   gradient ascent and report misclassification rates.
%
%   Inputs:
%     xtrain  - N x D matrix of training features (bias column appended here)
%     ytrain  - N x 1 column vector of 0/1 training labels
%     xtest   - M x D matrix of test features
%     ytest   - M x 1 column vector of 0/1 test labels
%     alpha   - fixed gradient-ascent step size
%     epsilon - stop when the gradient norm falls below this threshold
%
%   Outputs:
%     trainError - fraction of training samples misclassified
%     testError  - fraction of test samples misclassified
%
%   Predictions are round(sigmoid(w*x')), i.e. threshold at 0.5.

    % Append a constant-1 bias feature to both sets.
    xtrain(:, end+1) = 1;
    xtest(:, end+1) = 1;

    % One weight per feature (including the bias term just added).
    weights = ones(1, size(xtrain, 2));

    % Cap the iteration count so a too-small epsilon cannot loop forever.
    MAXITERATION = 1000;
    iteration = 0;

    % Seed the gradient with a large dummy value so the while-condition is
    % true on the first pass (it also serves as the "previous gradient" for
    % the divergence check below, which is only consulted after iteration 2).
    grad = 10 * ones(1, size(xtrain, 2));

    while (norm(grad) > epsilon && iteration < MAXITERATION)
        iteration = iteration + 1;
        gradPrev = grad;

        % Vectorized batch gradient of the log-likelihood:
        %   grad = sum_i (y_i - sigmoid(w . x_i)) * x_i
        probs = 1 ./ (1 + exp(-(xtrain * weights')));   % N x 1 predictions
        grad = (ytrain - probs)' * xtrain;              % 1 x (D+1)

        weightsPrev = weights;
        weights = weights + alpha * grad;

        % Divergence guard: if the gradient norm grew, the fixed step size
        % alpha is overshooting — undo the last step and stop early.
        if (norm(grad) > norm(gradPrev) && iteration > 2)
            weights = weightsPrev;
            break;
        end
    end

    % Classify by thresholding the sigmoid output at 0.5.
    trainResult = round(1 ./ (1 + exp(-(xtrain * weights'))));
    testResult = round(1 ./ (1 + exp(-(xtest * weights'))));

    trainError = mean(trainResult ~= ytrain);
    testError = mean(testResult ~= ytest);
end

