function [ out ] = MinSGD( data ,patten)
% MinSGD: stochastic gradient descent for a linear least-squares model.
%
% data:   [x1;x2;...;xn;y], one sample per column (last row is the target y)
% patten: options struct. Optional: omit it, or pass 0/false, to use defaults.
%   patten.r    learning rate          (default 0.01)
%   patten.trgt convergence tolerance  (default 0.0001)
%   patten.step maximum iterations     (default 10000)
%
% out: [bestSum, bestAvg, bestArgList] - best objective value, its mean,
%      and the corresponding weights [w0,w1,...,wn].

% Original code did `if ~patten`, which errors when patten is a struct and
% forced callers to pass 0 for defaults; accept both omission and 0.
if nargin < 2 || ~isstruct(patten)
  patten = struct();
  % learning rate
  patten.r = 0.01;
  % convergence tolerance
  patten.trgt = 0.0001;
  % maximum number of iterations
  patten.step = 10000;
end

% Start from all-zero weights: one weight per feature row plus the bias w0.
argList = zeros(1,size(data,1));
[bestSum,bestAvg] = MinSGDCalSum(data,argList);
bestArgList = argList;
lastArgList = bestArgList;

for i = 1:patten.step
  % Descend along a single randomly chosen sample (column) - the
  % "stochastic" part of SGD.
  argList = MinSGDCalSelected(data,argList,unidrnd(size(data,2)),patten);
  % Avoid shadowing the builtin `sum` (original used it as a variable name).
  [curSum,curAvg] = MinSGDCalSum(data,argList);
  if bestAvg > curAvg
    % Converged when the best weights stopped moving between improvements
    % and the objective improvement is below the tolerance.
    % (Original compared with array `==` and never updated lastArgList,
    % so the test always looked at the initial zero vector.)
    converged = isequal(bestArgList,lastArgList) && ...
                abs(curAvg-bestAvg) < patten.trgt;
    lastArgList = bestArgList;
    [bestSum,bestAvg,bestArgList] = deal(curSum,curAvg,argList);
    if converged
      break
    end
  end
end

out = [bestSum,bestAvg,bestArgList];

end  % function

function [ out ] = MinSGDCalSelected( data,argList,selectIndexList,patten)
% MinSGDCalSelected: perform one gradient-descent step on the selected
% data columns.
%
% data:            [x1;x2;...;xn;y], one sample per column
% argList:         current weights [w0,w1,...,wn]
% selectIndexList: column indices of data to use for this step; when the
%                  list is empty (or fails MATLAB array-truthiness, e.g.
%                  contains a 0) all columns are used
% patten:          options struct; patten.r is the learning rate
%
% out: updated weight row vector

% Replicates the original bare `if selectIndexList` truthiness:
% nonempty AND all elements nonzero.
if ~isempty(selectIndexList) && all(selectIndexList(:))
  mask = getSelectIndex(size(data,2),selectIndexList);
  dataWithStep = data(:,mask);
else
  dataWithStep = data;
end

argSize = size(argList);

% Perturb each weight with a symbolic variable so the objective can be
% differentiated with respect to each weight (Symbolic Math Toolbox).
symList = sym('w',argSize);
argListWithSym = argList + symList;

% Objective (mean squared error) as a symbolic expression of the perturbations.
[~,avg] = MinSGDCalSum(dataWithStep,argListWithSym);

% Gradient of the objective, each partial derivative evaluated at zero
% perturbation. BUG FIX: the original did `wd = subs(...)` inside the
% loop, overwriting the whole gradient with a scalar each iteration, so
% only the LAST partial derivative was applied to every weight.
wd = zeros(argSize);
for i = 1:numel(symList)
  wd(i) = double(subs(diff(avg,symList(i)),symList,zeros(argSize)));
end

% Gradient-descent update.
out = argList - patten.r * wd;

end  % function

function [out,avg] = MinSGDCalSum(data,argList)
  %% MinSGDCalSum: evaluate the least-squares objective of a linear model.
  %
  % argList: weight row vector [w0,w1,w2,...,wn]
  % data:    [x1;x2;...;xn;y], one sample per column, last row is the target
  %
  % out: sum of squared residuals  sum_k (yhat_k - y_k)^2
  % avg: out divided by the number of samples (mean squared error)

  nSamples = size(data,2);

  % Prepend a row of ones so w0 acts as the bias:
  % features column k is [1; x1_k; ...; xn_k].
  features = [ones(1,nSamples); data(1:end-1,:)];

  % Predicted minus actual targets for every sample.
  residuals = argList*features - data(end,:);

  out = sum(residuals.^2);
  avg = out/nSamples;
end

function out = getSelectIndex(count,indexList)
  %% getSelectIndex: return a logical mask of the selected columns.
  %
  % count:     total number of columns
  % indexList: list of selected column indices
  %
  % out: 1-by-count logical row vector, true where the column index
  %      appears in indexList.

  % ismember is already vectorized over its first argument; the original
  % arrayfun-per-element call was the hand-rolled equivalent.
  out = ismember(1:count,indexList);
end
