# coding=utf-8
from __future__ import print_function

import numpy as np


def sigmod_derivate(x):
    """Derivative of the logistic sigmoid, expressed in terms of its output x.

    If s = sigmoid(z), then ds/dz = s * (1 - s); callers pass the already
    activated value, not the pre-activation.
    """
    return (1 - x) * x


def sigmoid(x):
    """Logistic sigmoid activation: 1 / (1 + e^-x).

    Uses np.exp instead of np.math.exp: the np.math alias was deprecated in
    NumPy 1.25 and removed in NumPy 2.0. As a bonus, np.exp is a ufunc, so x
    may be a scalar or an ndarray (element-wise result).
    """
    return 1.0 / (1.0 + np.exp(-x))


class Perceptron:
    """Single-layer perceptron with a sigmoid activation, trained by the
    delta rule on data loaded from 'perceptron_train1.data' (rows: x1 x2 y).
    """

    output_num = 1        # number of output nodes
    learn_rate = 0.05     # learning rate
    max_iteration = 1000  # number of training epochs

    def __init__(self):
        # Per-instance state. The old class-level `weights = [[0], [0]]` was
        # shared by every instance, and `self.weights[x_index] += update`
        # with a NumPy-array RHS *extended* the inner lists (list += iterable
        # appends elements) instead of adding to the weight values — the
        # model silently grew and never learned. A flat float array fixes
        # both problems and lets the update be vectorized.
        self.weights = np.zeros(2)
        self.bias = 1.0  # bias term

    def predict(self, inputs):
        """Return sigmoid(w . inputs + bias) for a length-2 input vector."""
        return sigmoid(np.dot(inputs, self.weights) + self.bias)

    def train(self):
        """Fit weights and bias with the delta rule: w += lr * (y - out) * x."""
        train_data = np.loadtxt('perceptron_train1.data')
        # loadtxt returns a 1-D row for a single-sample file; normalize shape.
        train_data = np.atleast_2d(train_data)

        # range(self.max_iteration): the original range(1, max_iteration)
        # silently dropped one epoch.
        for epoch in range(self.max_iteration):
            for row in train_data:
                print('---------------------------------------------------------')
                x = row[:2]
                y = row[2]
                out = self.predict(x)

                error = y - out
                update = self.learn_rate * error * x
                print('更新值：', update)
                self.weights += update  # vectorized delta-rule step
                self.bias += self.learn_rate * error
                print('新权重', self.weights)


if __name__ == '__main__':
    # Train on the data file, then classify one held-out point.
    # Requires 'perceptron_train1.data' to exist in the working directory
    # (np.loadtxt raises IOError/OSError otherwise).
    perceptron = Perceptron()
    perceptron.train()
    test_data = [0, 1]
    result = perceptron.predict(test_data)
    print()
    print('最终结果：' + str(result))
