function [f,g_diff] = check_grad(x,funObj,varargin)
% [f,g_diff] = check_grad(x,funObj,varargin)
%
% Compares the analytic gradient returned by funObj against a forward
% finite-difference approximation, one coordinate at a time, in random
% order, stopping at the first coordinate that disagrees by more than 1e-4.
%
% Inputs:
%   x        - point at which to check the gradient (vector)
%   funObj   - function handle; [f,g] = funObj(x,varargin{:}) must return
%              the objective value and its gradient, and f = funObj(...)
%              with a single output must return just the objective value
%   varargin - extra arguments passed through to funObj unchanged
%
% Outputs:
%   f      - objective value at x
%   g_diff - user_g - diff_g, the element-wise gap between the analytic
%            and numerical gradients. On an early return the coordinates
%            not yet checked still have diff_g(j) == 0, so those entries
%            of g_diff are simply user_g(j).

p = length(x);

% Objective value and analytic (user-supplied) gradient at the base point.
[f, user_g] = funObj(x,varargin{:});
diff_g = zeros(size(user_g));

% Forward-difference step size, scaled by the magnitude of x.
% NOTE(review): the original wrote norm(p) with scalar p = length(x);
% norm of a nonnegative scalar is the scalar itself, so this is written
% as /p (numerically identical). Dividing the step by the problem
% dimension is unusual -- the analogous minFunc code divides by the norm
% of the perturbation direction, which is 1 for a unit vector e_j.
% Behavior preserved; confirm the intended scaling.
mu = 2*sqrt(1e-12)*(1+norm(x))/p;

% Check coordinates in random order; fail as soon as a bad gradient
% entry is found.
rp = randperm(p);
i = 1;
for j = rp
    % Perturb coordinate j only.
    e_j = zeros(p,1);
    e_j(j) = 1;
    % One-sided (forward) difference approximation of dF/dx_j.
    diff_g(j,1) = funObj(x + mu*e_j,varargin{:});
    diff_g(j,1) = (diff_g(j,1)-f)/mu;
    if abs(user_g(j)-diff_g(j)) > 1e-4
        fprintf('bad grad at index %d. Returning early\n',j);
        g_diff = user_g - diff_g;
        return
    end
    % Progress indicator every 100 coordinates checked.
    if mod(i,100) == 0
        disp(i);
    end
    i = i+1;
end
g_diff = user_g - diff_g;