function [maxScoreNet history time] = train(
            layers, 
            lRate, 
            data, 
            labels, 
            minTrnErr=0.004, 
            stagnationThreshold=20,
            validationSet=0.075,				#may either be a percentage of trainset or a separate set ending with label column
            maxIter=1000,
            E=1, 
            up=1.2,
            down=0.5,
            weights={})
	## Train a feed-forward network with batch backpropagation plus a
	## RPROP-style per-weight learning-rate adaptation (`up`/`down` factors).
	##
	## Inputs:
	##   layers              - row vector of hidden-layer sizes; the input width
	##                         and output width (number of distinct labels) are
	##                         derived from `data`/`labels` and added here
	##   lRate               - initial learning rate for every weight
	##   data, labels        - training samples (one row per sample) and their
	##                         integer class labels (label k maps to output k+1)
	##   minTrnErr           - training-error level below which validation
	##                         scoring and stagnation tracking begin
	##   stagnationThreshold - non-improving validation checks tolerated before
	##                         training stops
	##   validationSet       - scalar: fraction of the training set to hold out;
	##                         matrix: used directly, last column = labels
	##   maxIter             - hard cap on the number of training iterations
	##   E                   - initial weights drawn uniformly from [-E, E]
	##   up, down            - rate multipliers applied on sign agreement /
	##                         disagreement of consecutive weight deltas
	##   weights             - optional pre-initialized weight cell array; when
	##                         empty, weights are generated randomly
	##
	## Outputs:
	##   maxScoreNet - weights of the best network found on the validation set
	##   history     - column vector: mean absolute output error per iteration
	##   time        - total CPU seconds spent in this call

	time=cputime;
	### determine the target architecture: [input width, hidden..., #classes]
	### (semicolon added — the vector used to be echoed to stdout by accident)
	layers=[size(data,2),layers,size(unique(labels),1)];

	vData=vLabels=[];
	### Partition the data: scalar -> carve a validation fraction out of the
	### training set; matrix -> use it as-is (features | label column).
	if(numel(validationSet)==1)
		[vData vLabels data labels] = getMockData(data, labels, 10, validationSet);
	else
		vData=validationSet(:,1:end-1);
		vLabels=validationSet(:,end);
	end;

	### Best validation result seen so far
	maxScore = 0;
	maxScoreNet = {};

	#main loop's state
	iter=0;
	stagnationCount=0;
	history=[];
	pdeltas={}; #previous iteration's deltas, for gradient-sign comparison

	### Initialize weights (uniform in [-E, E]) unless the caller supplied
	### them, and give every single weight its own adaptive learning rate.
	### (The duplicate `lRates={}` initialization was removed.)
	needToGenerateWeights = isempty(weights);
	lRates={};
	for(i=1:size(layers,2)-1)
		if(needToGenerateWeights)
			weights{nextCell(weights)}=rand(layers(i)+1,layers(i+1)).*(2*E).-E;
		end;
		lRates{nextCell(lRates)}=ones(layers(i)+1,layers(i+1)).*lRate;
	end;

	#initial report
	printf("Initial result: %f\n",evaluate(weights, vData, vLabels));

	#backprop main loop
	do
		t=cputime;
		++iter;
		deltas = {};

		for(i=size(layers,2)-1:-1:1)
			deltas{i}=zeros(layers(1,i)+1,layers(1,i+1));
		end;

		errs = {};
		for(i=size(layers,2):-1:2)
			errs{i}=zeros(1,layers(1,i));
		end;

		## forward pass over the whole training set
		[response responses]=askNet(weights, data);

		## one-hot encode the labels (label k -> column k+1)
		answers = zeros(size(response));
		for(i=1:size(answers,1))
			answers(i,labels(i,1)+1)=1;
		end;

		## output-layer error, then backpropagate it; the bias row of each
		## weight matrix is excluded from the backward projection
		errs{size(errs,2)}=response.-answers;
		for(layer=size(errs,2)-1:-1:2)
			errs{layer}=errs{layer+1}*(weights{layer}(1:layers(1,layer),:))';
		end;

		## accumulate weight deltas; hidden layers are scaled by the
		## sigmoid derivative r.*(1-r) of their activations
		for(l=1:size(deltas,2))
			errors=errs{l+1};
			if(l<size(deltas,2))
				errors=errors.*(responses{l+1}.*(ones(size(responses{l+1})).-responses{l+1}));
			end;
			deltas{l}+=[responses{l},-ones(size(responses{l},1),1)]'*errors.*(lRates{l});
		end;

		history=[history; mean(vec(abs(errs{size(errs,2)})))];

		## apply the batch-averaged deltas and adapt each weight's rate:
		## same delta sign as last step -> grow by `up`, else shrink by `down`
		for(i=1:size(layers,2)-1)
			weights{i}=weights{i}.-deltas{i}/size(labels,1);
			if(size(pdeltas,2)>0)
				signs = sign(pdeltas{i}).*sign(deltas{i});
				signs(signs>0)=up;
				signs(signs<=0)=down;
				lRates{i}=lRates{i}.*signs;
			end;
		end;
		pdeltas=deltas;

		##some stats
		printf("Iteration: %d, CPU time: %f seconds \n", iter, cputime-t);
		if(history(size(history,1),1)<minTrnErr)
			score = evaluate(weights, vData, vLabels);
			## row of `history` from just before the current stagnation streak;
			## guard prevIdx>=1 avoids the out-of-bound index the original hit
			## on the very first check when score==maxScore
			prevIdx = size(history,1)-(1+stagnationCount);
			if(score>maxScore || (score==maxScore && prevIdx>=1 && history(size(history,1),1)<history(prevIdx,1)))
				maxScore=score;
				maxScoreNet=weights;
				stagnationCount=0;
			else
				stagnationCount++;
			end;
			printf("\t score: %f (best so far: %f), stagnation: %d of %d \n", score, maxScore, stagnationCount,stagnationThreshold);
		end;

		printf("\t avg neuron errors in layers: ");
		for(i=2:size(layers,2))
			printf(" %d: %f;",i,mean(abs(vec(errs{i}))));
		end;
		printf(" | output goal: %f \n",minTrnErr);
		fflush(stdout);
	until(iter>maxIter || (history(size(history,1),1)<minTrnErr && stagnationCount>=stagnationThreshold))
	time=cputime-time;
end;
