function [evalResults,nModelEvals] = calcObjectiveScoreParallel(dreamPar,evalResults, totalNodes,nModelEvals, iteration)
% calcObjectiveScoreParallel  Evaluate candidate parameter sets on MPI workers.
%
% Splits the rows of evalResults across the available worker nodes, sends
% each worker its contiguous slice of the parameter columns, then gathers
% the computed objective and log-posterior values back into evalResults.
%
% Inputs:
%   dreamPar    - struct with column-index fields: iterCol, parCols, objCol,
%                 logPCol, evalCol
%   evalResults - matrix, one candidate per row; parameters read from
%                 dreamPar.parCols, results written to objCol:logPCol
%   totalNodes  - total MPI ranks (rank 0 is this master, so at most
%                 totalNodes-1 workers are used)
%   nModelEvals - running count of model evaluations; updated and returned
%   iteration   - current iteration number, used to stamp iterCol
%
% Outputs:
%   evalResults - input matrix with objCol:logPCol, evalCol and iterCol filled
%   nModelEvals - incremented by the number of evaluations received
global WORK_TAG;

lins = size(evalResults,1);

iterCol = dreamPar.iterCol;
parCols = dreamPar.parCols;
objCol  = dreamPar.objCol;
logPCol = dreamPar.logPCol;
evalCol = dreamPar.evalCol;

% Never use more workers than there are rows to evaluate.
totalUsedNodes = min(lins,totalNodes-1);

% --- Scatter: send each worker its slice of the parameter rows ---
for worker = 1:totalUsedNodes
    [first,last] = getIndexBounds(lins, totalUsedNodes,worker);
    % Flatten the slice into a single row vector for transmission.
    package = reshape(evalResults(first:last,parCols),1,(last-first+1)*size(parCols,2));
    MPI_Send(package,worker, WORK_TAG, MPI_COMM_WORLD);
end

% --- Gather: receive results from workers in completion order ---
for count = 1:totalUsedNodes

    [status statusInfo] = MPI_Probe(MPI_ANY_SOURCE, MPI_ANY_TAG,MPI_COMM_WORLD);
    if status ~= MPI_SUCCESS
        % FIX: the source rank is unknown until the probe succeeds, so the
        % old message reported a stale 'worker' index from the send loop.
        error('calcObjectiveScoreParallel: error probing for worker results');
    end

    worker = statusInfo.src;
    [first,last] = getIndexBounds(lins, totalUsedNodes,worker);

    [status, receivedNumel] = MPI_Get_elements(statusInfo,[]);
    if status ~= MPI_SUCCESS
        % FIX: formatted error instead of strcat, which silently drops the
        % trailing space of a char argument ("...workerN").
        error('Error receiving data from the worker %d', worker);
    end

    receivedPackage = zeros([1,receivedNumel]);
    MPI_Recv(receivedPackage,worker,MPI_ANY_TAG,MPI_COMM_WORLD);
    % Each evaluated row comes back as two values: objective and log-posterior.
    evalResults(first:last,objCol:logPCol)= reshape(receivedPackage,last-first+1,2);
    evalResults(first:last, evalCol) = 1;

    nModelEvals = nModelEvals + receivedNumel/2;

    disp(['receivedNumel = ', num2str(receivedNumel)])
end

% Stamp the iteration column once; this assignment is loop-invariant, so it
% was hoisted out of the receive loop (it previously rewrote the whole
% column on every receive).
% NOTE(review): the iteration*10 scaling looks like a debugging artifact --
% the original also carried a commented-out variant based on nModelEvals.
% TODO confirm the intended iterCol semantics.
evalResults(:,iterCol) = iteration*10 + (1:lins)';