clear; close; clc;
% Prepare the data

% MATLAB users can load the built-in iris_dataset:
% ##########################
% [X, Y] = iris_dataset;
% X = X';
% Y = Y';
% ##########################

% Octave users must download the data file manually
% ############################
raw = load('iris-150x5.data');
X = raw(:, 1:end-1);
y = raw(:, end);
labels = unique(y);
nLabels = length(labels);
% One-hot encode the class labels into Y (one column per class)
Y = zeros(length(y), nLabels);
for c = 1: nLabels
    Y(:, c) = (y == labels(c));
end
% ############################

% Normalize each feature column: subtract the mean, scale by the range
for c = 1: size(X, 2)
    col = X(:, c);
    X(:, c) = (col - mean(col)) / (max(col) - min(col));
end

% Hold out the last 10 samples of each class (rows 41-50, 91-100, 141-150) for testing
Xtrain = [X(1: 40, :); X(51: 90, :); X(101: 140, :)];
Ytrain = [Y(1: 40, :); Y(51: 90, :); Y(101: 140, :)];
Xtest = [X(41: 50, :); X(91: 100, :); X(141: 150, :)];
Ytest = [Y(41: 50, :); Y(91: 100, :); Y(141: 150, :)];

% Build a neural network with:
%   hidden layers:      hiddenLayer
%   connectivity:       fully connected
%   activation:         sigmoid
%   hidden units:       same count as the input layer
%   loss:               mean squared error
%   error propagation:  backpropagation
%   optimizer:          gradient descent
%   data layout:        one sample per row
%   regularization:     L2

% Number of hidden layers
hiddenLayer = 2;
% Learning-rate schedules; a schedule's length sets the iteration count
Alpha{1} = repmat(10, 500, 1);   % static rate
Alpha{2} = (50: -0.1: 0.1);      % decaying rate
% L2 regularization strength
lambda = 0.1;
% Training error history
Jtrain = [];
% Test error history
Jtest = [];

% Initialize the weights and run an initial forward pass
[W, P] = initWeight(Xtrain, Ytrain, hiddenLayer);
A = fp(Xtrain, W);

% Plot colors: phase 1 (static rate) uses r/g, phase 2 (decaying rate) uses b/k
color = '.rgbk';
for ia = 1: length(Alpha)
    alpha = Alpha{ia};
    iter = length(alpha);
    % Preallocate (and reset) the error histories each phase, so a shorter
    % schedule never plots stale values left over from the previous phase
    Jtrain = zeros(1, iter);
    Jtest = zeros(1, iter);
    for k = 1: iter
        % Backpropagate gradients, take one gradient-descent step, re-evaluate
        P = bp(A, W, Ytrain, lambda);
        W = gradDesc(alpha(k), W, P);
        A = fp(Xtrain, W);
        Jtrain(k) = cost(A{hiddenLayer + 2}, Ytrain, W, lambda);
        B = fp(Xtest, W);
        Jtest(k) = cost(B{hiddenLayer + 2}, Ytest, W, lambda);
    end
    HatY = B{hiddenLayer + 2};
    % Predicted class = index of the largest output unit in each row.
    % max(..., [], 2) avoids the transpose trick and the unused 'value' output.
    % (MATLAB users can use vec2ind instead.)
    [~, index] = max(HatY, [], 2);
    disp(index');
    plot(Jtrain, color(ia * 2)); hold on; plot(Jtest, color(ia * 2 + 1));
end

legend({'train(static)', 'test(static)', 'train(dynamic)', 'test(dynamic)'});
