{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 26,
   "metadata": {
    "collapsed": false
   },
   "outputs": [],
   "source": [
    "import numpy as np\n",
    "\n",
    "trace = False\n",
    "trace_forward = False\n",
    "\n",
    "class FC:\n",
    "    '''\n",
    "    This class is not thread safe.\n",
    "    '''\n",
    "    def __init__(self, in_num, out_num, lr = 0.1):\n",
    "        self._in_num = in_num\n",
    "        self._out_num = out_num\n",
    "        self.w = np.random.randn(in_num, out_num)\n",
    "        self.b = np.zeros((out_num, 1))\n",
    "        self.lr = lr\n",
    "    def _sigmoid(self, in_data):\n",
    "        return 1 / (1 + np.exp(-in_data))\n",
    "    def forward(self, in_data):\n",
    "        \n",
    "        self.topVal = self._sigmoid(np.dot(self.w.T, in_data) + self.b)\n",
    "        if trace_forward:\n",
    "            print '=== topVal {0} ==='.format(self.topVal.shape)\n",
    "            print self.topVal\n",
    "        self.bottomVal = in_data\n",
    "        return self.topVal\n",
    "    def backward(self, loss):\n",
    "        residual_z = loss * self.topVal * (1 - self.topVal)\n",
    "        grad_w = np.dot(self.bottomVal, residual_z.T)\n",
    "        grad_b = np.sum(residual_z)\n",
    "        self.w -= self.lr * grad_w\n",
    "        self.b -= self.lr * grad_b\n",
    "        residual_x = np.dot(self.w, residual_z)\n",
    "        if trace:\n",
    "            print '=== z {0}==='.format(residual_z.shape)\n",
    "            print residual_z\n",
    "            print '=== grad_w {0}==='.format(grad_w.shape)\n",
    "            print grad_w\n",
    "            print '=== grad_b {0}==='.format(grad_b.shape)\n",
    "            print grad_b\n",
    "            print '=== self.w {0}==='.format(self.w.shape)\n",
    "            print self.w\n",
    "            print '=== self.b {0} ==='.format(self.b.shape)\n",
    "            print self.b\n",
    "            print '=== residual {0} ==='.format(residual_x.shape)\n",
    "            print residual_x\n",
    "        return residual_x\n",
    "\n",
    "class SquareLoss:\n",
    "    '''\n",
    "    Same as above, not thread safe\n",
    "    '''\n",
    "    def forward(self, y, t):\n",
    "        self.loss = y - t\n",
    "        if trace:\n",
    "            print '=== Loss ==='.format(self.loss.shape)\n",
    "            print self.loss\n",
    "        return np.sum(self.loss * self.loss) /  self.loss.shape[1] / 2\n",
    "    def backward(self):\n",
    "        if trace:\n",
    "            print '=== loss {0} ==='.format(self.loss.shape)\n",
    "            print self.loss\n",
    "        return self.loss\n",
    "\n",
    "class Net:\n",
    "    def __init__(self, input_num=2, hidden_num=4, out_num=1, lr=0.1):\n",
    "        self.fc1 = FC(input_num, hidden_num, lr)\n",
    "        self.fc2 = FC(hidden_num, out_num, lr)\n",
    "        self.loss = SquareLoss()\n",
    "    def train(self, X, y): # X are arranged by col\n",
    "        for i in range(10000):\n",
    "            # forward step\n",
    "            layer1out = self.fc1.forward(X)\n",
    "            layer2out = self.fc2.forward(layer1out)\n",
    "            loss = self.loss.forward(layer2out, y)\n",
    "            if i % 1000 == 0:\n",
    "                print 'iter = {0}, loss ={1}'.format(i, loss)\n",
    "                print '=== Label vs Prediction ==='\n",
    "                print 't={0}'.format(y)\n",
    "                print 'y={0}'.format(layer2out)\n",
    "            # backward step\n",
    "            layer2loss = self.loss.backward()\n",
    "            layer1loss = self.fc2.backward(layer2loss)\n",
    "            saliency = self.fc1.backward(layer1loss)\n",
    "        layer1out = self.fc1.forward(X)\n",
    "        layer2out = self.fc2.forward(layer1out)\n",
    "        print '=== Final ==='\n",
    "        print 'X={0}'.format(X)\n",
    "        print 't={0}'.format(y)\n",
    "        print 'y={0}'.format(layer2out)\n",
    "\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 25,
   "metadata": {
    "collapsed": false
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "iter = 0, loss =0.29837110876\n",
      "=== Label vs Prediction ===\n",
      "t=[[ 0.01]\n",
      " [ 0.99]]\n",
      "y=[[ 0.75136507]\n",
      " [ 0.77292847]]\n",
      "iter = 1000, loss =0.00034637971269\n",
      "=== Label vs Prediction ===\n",
      "t=[[ 0.01]\n",
      " [ 0.99]]\n",
      "y=[[ 0.02873778]\n",
      " [ 0.97151609]]\n",
      "iter = 2000, loss =0.000119529215903\n",
      "=== Label vs Prediction ===\n",
      "t=[[ 0.01]\n",
      " [ 0.99]]\n",
      "y=[[ 0.0210022 ]\n",
      " [ 0.97913675]]\n",
      "iter = 3000, loss =6.0258461253e-05\n",
      "=== Label vs Prediction ===\n",
      "t=[[ 0.01]\n",
      " [ 0.99]]\n",
      "y=[[ 0.01781127]\n",
      " [ 0.98228631]]\n",
      "iter = 4000, loss =3.54968683217e-05\n",
      "=== Label vs Prediction ===\n",
      "t=[[ 0.01]\n",
      " [ 0.99]]\n",
      "y=[[ 0.01599555]\n",
      " [ 0.98407994]]\n",
      "iter = 5000, loss =2.27849062666e-05\n",
      "=== Label vs Prediction ===\n",
      "t=[[ 0.01]\n",
      " [ 0.99]]\n",
      "y=[[ 0.01480398]\n",
      " [ 0.98525747]]\n",
      "iter = 6000, loss =1.5445159009e-05\n",
      "=== Label vs Prediction ===\n",
      "t=[[ 0.01]\n",
      " [ 0.99]]\n",
      "y=[[ 0.01395575]\n",
      " [ 0.98609585]]\n",
      "iter = 7000, loss =1.08721488321e-05\n",
      "=== Label vs Prediction ===\n",
      "t=[[ 0.01]\n",
      " [ 0.99]]\n",
      "y=[[ 0.01331933]\n",
      " [ 0.98672489]]\n",
      "iter = 8000, loss =7.86715065902e-06\n",
      "=== Label vs Prediction ===\n",
      "t=[[ 0.01]\n",
      " [ 0.99]]\n",
      "y=[[ 0.01282401]\n",
      " [ 0.98721445]]\n",
      "iter = 9000, loss =5.81352036324e-06\n",
      "=== Label vs Prediction ===\n",
      "t=[[ 0.01]\n",
      " [ 0.99]]\n",
      "y=[[ 0.01242797]\n",
      " [ 0.98760584]]\n",
      "=== Final ===\n",
      "X=[[ 0.05]\n",
      " [ 0.1 ]]\n",
      "t=[[ 0.01]\n",
      " [ 0.99]]\n",
      "y=[[ 0.01210474]\n",
      " [ 0.98792522]]\n"
     ]
    }
   ],
   "source": [
     "# example from https://mattmazur.com/2015/03/17/a-step-by-step-backpropagation-example/\n",
     "# One training sample arranged as column vectors: 2 inputs, 2 targets,\n",
     "# matching the worked example's layout.\n",
     "X = np.array([[0.05, 0.1]]).T\n",
     "y = np.array([[0.01, 0.99]]).T\n",
     "\n",
     "# 2-2-2 network with lr = 0.5; overwrite the random weights and biases\n",
     "# with the fixed values from the blog post so the run is reproducible.\n",
     "net = Net(2,2,2,0.5)\n",
     "net.fc1.w = np.array([[.15,.25], [.2, .3]])\n",
     "net.fc1.b = np.array([[.35], [.35]])\n",
     "net.fc2.w = np.array([[.4,.5], [.45,.55]])\n",
     "net.fc2.b = np.array([[.6], [.6]])\n",
     "net.train(X,y)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 27,
   "metadata": {
    "collapsed": false
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "iter = 0, loss =0.105256639066\n",
      "=== Label vs Prediction ===\n",
      "t=[[0 0 0 1]]\n",
      "y=[[ 0.40930536  0.4617139   0.36923076  0.4299025 ]]\n",
      "iter = 1000, loss =0.0229368486589\n",
      "=== Label vs Prediction ===\n",
      "t=[[0 0 0 1]]\n",
      "y=[[ 0.04445123  0.22684496  0.17747671  0.68605373]]\n",
      "iter = 2000, loss =0.00657594469044\n",
      "=== Label vs Prediction ===\n",
      "t=[[0 0 0 1]]\n",
      "y=[[ 0.01057127  0.11332809  0.11016211  0.83411794]]\n",
      "iter = 3000, loss =0.00322081318498\n",
      "=== Label vs Prediction ===\n",
      "t=[[0 0 0 1]]\n",
      "y=[[ 0.00517544  0.07831654  0.07871461  0.88419737]]\n",
      "iter = 4000, loss =0.00201059297485\n",
      "=== Label vs Prediction ===\n",
      "t=[[0 0 0 1]]\n",
      "y=[[ 0.00336374  0.06171018  0.0624756   0.90855558]]\n",
      "iter = 5000, loss =0.00142205310651\n",
      "=== Label vs Prediction ===\n",
      "t=[[0 0 0 1]]\n",
      "y=[[ 0.00249895  0.05189239  0.05257126  0.92309992]]\n",
      "iter = 6000, loss =0.00108341055769\n",
      "=== Label vs Prediction ===\n",
      "t=[[0 0 0 1]]\n",
      "y=[[ 0.00200067  0.04532728  0.04585262  0.93287134]]\n",
      "iter = 7000, loss =0.000866734887908\n",
      "=== Label vs Prediction ===\n",
      "t=[[0 0 0 1]]\n",
      "y=[[ 0.00167856  0.04058314  0.04096262  0.9399489 ]]\n",
      "iter = 8000, loss =0.000717647908313\n",
      "=== Label vs Prediction ===\n",
      "t=[[0 0 0 1]]\n",
      "y=[[ 0.00145369  0.03696819  0.0372232   0.94534786]]\n",
      "iter = 9000, loss =0.000609513241467\n",
      "=== Label vs Prediction ===\n",
      "t=[[0 0 0 1]]\n",
      "y=[[ 0.00128784  0.03410575  0.03425751  0.94962473]]\n",
      "=== Final ===\n",
      "X=[[0 0 1 1]\n",
      " [0 1 0 1]]\n",
      "t=[[0 0 0 1]]\n",
      "y=[[ 0.00116042  0.03177232  0.03183889  0.95311123]]\n"
     ]
    }
   ],
   "source": [
     "# and operation\n",
     "# Learn logical AND: X columns are the four input pairs, y their labels.\n",
     "X = np.array([[0, 0], [0, 1], [1, 0], [1, 1]]).T\n",
     "y = np.array([[0],[0],[0],[1]]).T\n",
     "\n",
     "# 2-4-1 network, lr = 0.1, random initial weights.\n",
     "net = Net(2,4,1,0.1)\n",
     "net.train(X,y)"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 2",
   "language": "python",
   "name": "python2"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 2
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython2",
   "version": "2.7.10"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 0
}
