function [net,bestsc,x,y] = rb( P,T, PT,TT, Params_nn)
% RB  Search for the spread constant giving the fewest test-set errors.
%
% in:
%   P,T       - training input/output vectors - see start.m
%   PT,TT     - test input/output vectors - see start.m
%   Params_nn - learning parameters; fields used here: net ('rb'|'pnn'),
%               eg, numSteps, maxNeurons, display, estimscFnc
% out:
%   net    - the best network found
%   bestsc - the best spread constant (the one with the fewest errors)
%   x      - vector of all spread constants actually tried
%   y      - vector of error counts for those spread constants,
%            i.e. spread constant x(i) produced y(i) errors

numSteps = Params_nn.numSteps;

% initial estimate of the spread constant and its search range
[sc, minsc, maxsc] = feval(Params_nn.estimscFnc, P);

% build a baseline network before the search loop so we always have a
% valid result to return even when numSteps == 0
net = buildNet(P, T, sc, Params_nn);
best = errors(net, PT, TT);
bestsc = sc;

% sample numSteps+1 spread constants uniformly over [minsc, maxsc];
% linspace also behaves sanely when maxsc == minsc (the original
% (maxsc-minsc)/dsc computation produced NaN there)
iterations = [];
if (numSteps ~= 0)
    iterations = linspace(minsc, maxsc, numSteps + 1);
end
num = numel(iterations);

x = zeros(1, num);
y = zeros(1, num);
i = 0;
% try each candidate spread constant in turn and keep the network with
% the fewest errors on the given test data
for scd = iterations
    i = i + 1;
    netn = buildNet(P, T, scd, Params_nn);
    e = errors(netn, PT, TT);
    x(i) = scd;
    y(i) = e;
    if (e < best)
        bestsc = scd;
        net = netn;
        best = e;
    end
    if (e == 0)
        break;      % perfect score - no point in searching further
    end
end
% trim the preallocated tail so x,y only contain constants that were
% actually evaluated (matters when the loop broke out early)
x = x(1:i);
y = y(1:i);


function net = buildNet(P, T, sc, Params_nn)
% BUILDNET  Construct the network type selected by Params_nn.net with
% spread constant sc: 'rb' -> newrb, 'pnn' -> newpnn (with 0.25*sc).
if strcmp(Params_nn.net, 'rb')
    net = newrb(P, T, Params_nn.eg, sc, Params_nn.maxNeurons, Params_nn.display);
elseif strcmp(Params_nn.net, 'pnn')
    net = newpnn(P, T, 0.25*sc);
else
    error(['neznama hodnota parametru Params_nn.net']);
end