function [ out1,out2 ] = MinBpNNet(patten,varargin)
  % MinBpNNet: entry point for a minimal BP (back-propagation) neural network.
  %
  % patten   mode selector: 'train' fits a new network, 'use' evaluates one
  % varargin 'train' -> {data, layList}
  %          'use'   -> {data, wLayList, ps}
  %
  % Returns ('train'): out1 = wLayList (per-layer weights), out2 = ps
  %         ('use')  : out1/out2 = outputs/derivatives from calOutOfAllLayer
  %                    computed on the normalized inputs.

  % logistic (sigmoid) activation and its derivative, applied per scalar
  fn = @(x) 1/(1+exp(-x));
  dfn = @(x) exp(-x)/(exp(-x) + 1)^2;

  if strcmp(patten,'train')
    % train the network; varargin = {data, layList}
    [out1,out2] = MinBpNNetTrain(patten,varargin{1},varargin{2},fn,dfn);
  elseif strcmp(patten,'use')
    % evaluate; varargin = {data, wLayList, ps}
    % re-apply the normalization captured during training before the forward pass
    data = mapminmax('apply',varargin{1},varargin{3});
    [out1,out2] = calOutOfAllLayer(data,varargin{2},size(varargin{2},2),fn,dfn);
  else
    % Previously an unknown mode fell through silently, leaving out1/out2
    % undefined and producing a confusing error at the caller.
    error('MinBpNNet:badMode', ...
      'patten must be ''train'' or ''use'', got ''%s''.',patten);
  end

end

function [ wLayList,ps ] = MinBpNNetTrain(patten,data,layList,fn,dfn )
  % MinBpNNetTrain: fit the BP network weights by iterated gradient steps
  % with an adaptive learning rate.
  %
  % data    samples, one per column; the LAST row holds the target value:
  %         [x1_1 ... x1_N; x2_1 ... x2_N; ... ; y1 ... yN]
  % patten  mode string, forwarded unchanged to the helper functions
  % layList row vector: neuron count of each layer
  % fn/dfn  activation function and its derivative
  %
  % Returns wLayList (cell array of per-layer weight matrices, bias weight in
  % column 1) and ps (mapminmax settings needed later by the 'use' mode).

  sampleCount = size(data,2);
  inputDim = size(data,1) - 1;   % last row is the target, not a feature
  yData = data(end,:);

  % randomly initialize each layer in [-1,1]; the extra +1 column is the bias
  layerCount = size(layList,2);
  wLayList = cell(1,layerCount);
  for k = 1:layerCount
    if k == 1
      fanIn = inputDim;
    else
      fanIn = layList(k-1);
    end
    wLayList{k} = unifrnd(-1,1,layList(k),fanIn+1);
  end

  % normalize the feature rows only (targets excluded)
  [data,ps] = mapminmax(data(1:end-1,:));

  r = 0.08;      % initial learning rate
  rd = 0.3;      % learning-rate adaptation factor
  maxStep = 50;  % max rate adjustments per gradient step (may be tight)

  % keep stepping until the rate search finds no improving step
  % (MinBpNNetAdjustR returns exactly 0 as the "no improvement" sentinel)
  while r ~= 0
    wdLayList = MinBpNNetCalGrad( data,yData,sampleCount,patten,layList,wLayList,layerCount,fn,dfn);
    [ r,wLayList ] = MinBpNNetAdjustR( data,yData,sampleCount,patten,layList,wLayList,wdLayList,layerCount,fn,dfn,r,rd,maxStep);
  end

end  % MinBpNNetTrain

function [ wdLayList ] = MinBpNNetCalGrad( data,yData,dataSize,patten,layList,wLayList,laySize,fn,dfn)
% MinBpNNetCalGrad: accumulate the back-propagation gradient over all samples.
%
% data     normalized inputs (no targets), one sample per column
% yData    target values, one per sample
% dataSize number of samples
% patten   mode string (unused here; kept for a uniform helper signature)
% layList  neuron count per layer
% wLayList per-layer weight matrices {[w1_0,w1_1,...; w2_0,...], ...};
%          column 1 of each matrix is the bias weight
% laySize  number of layers
% fn       activation function
% dfn      derivative of the activation function
%
% Returns wdLayList: cell array, same shapes as wLayList, holding the
% per-sample gradients summed over the whole data set.

% forward pass: every layer's output and activation derivative
[outLayList,doutLayList] = calOutOfAllLayer(data,wLayList,laySize,fn,dfn);

% zero-initialize the gradient accumulators
wdLayList = cell(1,laySize);
for i=1:laySize
  wdLayList{i} = zeros(size(wLayList{i}));
end

% output-layer error for every sample
topErrorList = yData - outLayList{end};

% loop over samples, accumulating each one's gradient contribution
for di=1:dataSize
  % back-propagate the error from the top layer down
  ErrorList = cell(1,laySize);
  for i=laySize:-1:1
    if i == laySize
      ErrorList{i} = topErrorList(di);
    else
      ErrorList{i} = calErrorSignal(ErrorList{i+1},wLayList{i+1});
    end
  end

  % per-layer gradient for sample di
  for i=1:laySize
    % BUGFIX: was doutLayList{i}(di) -- linear indexing into a
    % (neurons x samples) matrix picked a single wrong element whenever a
    % layer has more than one neuron; take the full derivative column for
    % this sample instead, matching calGrad's error'.*dout elementwise shape.
    dout = doutLayList{i}(:,di);
    if i == 1
      wdLayList{i} = wdLayList{i} + calGrad( ErrorList{i},data(:,di),dout);
    else
      wdLayList{i} = wdLayList{i} + calGrad( ErrorList{i},outLayList{i-1}(:,di),dout);
    end
  end
end

end  % MinBpNNetCalGrad

function [ rOut,wLayListOut ] = MinBpNNetAdjustR( data,yData,dataSize,patten,layList,wLayList,wdLayList,laySize,fn,dfn,r,rd,maxStep)
% MinBpNNetAdjustR: search for a learning rate along the gradient direction
% and return the updated weights.
%
% data     normalized inputs (no targets), one sample per column
% yData    target values
% dataSize number of samples (unused here; kept for a uniform signature)
% patten   mode string (unused here; kept for a uniform signature)
% layList  neuron count per layer (unused here; kept for a uniform signature)
% wLayList current per-layer weight matrices (bias in column 1)
% wdLayList gradient accumulated by MinBpNNetCalGrad, same shapes
% laySize  number of layers
% fn/dfn   activation function and its derivative
% r        starting learning rate for the search
% rd       rate adaptation factor, 0 < rd < 1:
%          improving -> r = r*(1+rd); worse -> r = r*(1-rd)
% maxStep  number of trial rates to evaluate
%
% Returns rOut, the best rate found (exactly 0 if no trial improved the
% error -- the caller uses that as a stop sentinel), and wLayListOut, the
% weights stepped by rOut along the gradient.

% squared error of the unmodified network (baseline to beat)
outLayList = calOutOfAllLayer( data,wLayList,laySize,fn,dfn );
bestErr = sum((yData - outLayList{end}).^2);   % renamed: 'error' shadowed the builtin

rOut = 0;    % stays 0 unless some trial rate lowers the error
tmpR = r;

for i=1:maxStep

  % candidate weights for the current trial rate
  tmpWLayList = cell(1,laySize);
  for j=1:laySize
    tmpWLayList{j} = wLayList{j}  + wdLayList{j}  * tmpR;
  end

  % squared error with the candidate weights
  outLayList = calOutOfAllLayer( data,tmpWLayList,laySize,fn,dfn );
  tmpError = sum((yData - outLayList{end}).^2);

  if tmpError < bestErr
    bestErr = tmpError;
    rOut = tmpR;
    tmpR = tmpR * (1+rd);   % improving: try a larger step
  else
    tmpR = tmpR * (1-rd);   % worse: back off
  end
end
% (removed stray unsuppressed '[i,rOut]' that dumped debug output every call)

% apply the best rate found (a no-op step when rOut == 0)
wLayListOut = cell(1,laySize);
for j=1:laySize
  wLayListOut{j} = wLayList{j}  + wdLayList{j}  * rOut;
end

end  % MinBpNNetAdjustR

function [ outLayList,doutLayList ] = calOutOfAllLayer( data,wLayList,laySize,fn,dfn )
% calOutOfAllLayer: forward pass -- compute every layer's output and the
% activation derivative at its pre-activation, for all samples at once.
%
% data     normalized inputs, one sample per column
% wLayList per-layer weight matrices (bias weight in column 1)
% laySize  number of layers
% fn/dfn   activation function and its derivative
%
% Returns outLayList{k} / doutLayList{k}: (neurons_k x samples) matrices.

outLayList = cell(1,laySize);
doutLayList = cell(1,laySize);
prev = data;   % layer 1 feeds on the raw input; later layers on the previous output
for k = 1:laySize
  [outLayList{k},doutLayList{k}] = calOutOfLayer(prev,wLayList{k},fn,dfn);
  prev = outLayList{k};
end

end  % calOutOfAllLayer

function [ out,dout ] = calOutOfLayer(data,wLayer,fn,dfn)
  % calOutOfLayer: compute one layer's output from the previous layer's
  % output (or the network input), using the layer's weight matrix.
  %
  % data   (inputs x samples) matrix from the layer below
  % wLayer (neurons x inputs+1) weights; column 1 is the bias weight
  % fn/dfn scalar activation function and its derivative
  %
  % out/dout: fn and dfn evaluated elementwise at each pre-activation.
  nSamples = size(data,2);
  % prepend a row of ones so column 1 of wLayer acts as the bias term
  preAct = wLayer * [ones(1,nSamples); data];
  out = arrayfun(fn,preAct);
  dout = arrayfun(dfn,preAct);

end  % calOutOfLayer

function [ out ] = calErrorSignal( topError,wLayer )
  % calErrorSignal: propagate one layer's error signal back to the layer
  % below through this layer's weight matrix.
  %
  % topError row vector of the current layer's error. When it has more than
  %          one entry, its first element is the bias-column term produced
  %          by a previous back-propagation step and is dropped before
  %          multiplying (the bias has no corresponding lower-layer neuron).
  % wLayer   current layer's weights, (k x j+1) with bias in column 1; the
  %          result is therefore 1 x (j+1) and again carries a leading
  %          bias-column entry, which calGrad strips in turn.
  %
  % NOTE(review): the activation derivative (dout) is NOT folded into the
  % signal here; it is only applied per-layer later in calGrad, so upper
  % layers' derivatives never reach lower layers' error -- confirm this
  % deviation from textbook back-propagation is intentional.
  if size(topError,2) > 1
    out = topError(2:end) * wLayer;
  else
    out = topError * wLayer;
  end

end  % calErrorSignal

function [ out ] = calGrad( error,LastOut,dout)
  % calGrad: gradient contribution of a single sample for one layer.
  %
  % error   row vector of error signals for this layer's neurons
  %         [error_1,...,error_i]; if it still carries the leading
  %         bias-column term from calErrorSignal, that first entry is dropped.
  % LastOut column vector of the previous layer's outputs for this sample
  %         [LastOut_1;...;LastOut_j]
  % dout    activation derivative(s) at this layer's pre-activation
  %
  % out(i,j) = error_i * dout_i * [1; LastOut]_j  (column 1 is the bias term)
  errSignal = error;
  if size(errSignal,2) > 1
    errSignal = errSignal(2:end);   % discard the propagated bias-column entry
  end
  delta = errSignal' .* dout;
  out = delta * [1, LastOut'];

end  % calGrad
