Commit 82fdb01 by rzimmerdev
Parent: bfbda3e

Added LeNet model implemented with PyTorch modules

.github/workflows/learn-github-actions.yml CHANGED
@@ -1,5 +1,5 @@
 name: learn-github-actions
-run-name: ${{ github.actor }} is learning GitHub Actions
+run-name: ${{ github.actor }} has made changes
 on: [push]
 jobs:
   check-bats-version:
notebooks/functional.ipynb ADDED
@@ -0,0 +1,105 @@
+{
+ "cells": [
+  {
+   "cell_type": "code",
+   "execution_count": 79,
+   "metadata": {
+    "collapsed": true,
+    "pycharm": {
+     "name": "#%%\n"
+    }
+   },
+   "outputs": [],
+   "source": [
+    "from jax import numpy as jnp\n",
+    "from jax import jit, vmap"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 22,
+   "outputs": [],
+   "source": [
+    "@jit\n",
+    "def sigmoid(x):\n",
+    "    return 1 / (1 + jnp.exp(-1 * x))"
+   ],
+   "metadata": {
+    "collapsed": false,
+    "pycharm": {
+     "name": "#%%\n"
+    }
+   }
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 75,
+   "outputs": [],
+   "source": [
+    "@jit\n",
+    "def relu(x):\n",
+    "    return x * (x > 0)"
+   ],
+   "metadata": {
+    "collapsed": false,
+    "pycharm": {
+     "name": "#%%\n"
+    }
+   }
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 98,
+   "outputs": [],
+   "source": [
+    "@jit\n",
+    "@vmap\n",
+    "def softmax(x):\n",
+    "    \"\"\"\n",
+    "    >>> jnp.sum(softmax(jnp.array([[1, 2, 4], [1, 2, 3], [1, 2, 3]])), axis=1)\n",
+    "    DeviceArray([1., 1., 1.], dtype=float32)\n",
+    "    \"\"\"\n",
+    "    return jnp.exp(x) / jnp.sum(jnp.exp(x))"
+   ],
+   "metadata": {
+    "collapsed": false,
+    "pycharm": {
+     "name": "#%%\n"
+    }
+   }
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "outputs": [],
+   "source": [],
+   "metadata": {
+    "collapsed": false,
+    "pycharm": {
+     "name": "#%%\n"
+    }
+   }
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "Python 3",
+   "language": "python",
+   "name": "python3"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 2
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython2",
+   "version": "2.7.6"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 0
+}
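A quick sanity check of the new activation cells (a minimal sketch, not part of the commit, intended to run after the cells above in the same notebook):

    print(sigmoid(jnp.array(0.0)))       # 0.5
    print(relu(jnp.array([-1.0, 2.0])))  # [0. 2.]
    print(jnp.sum(softmax(jnp.array([[1.0, 2.0, 4.0], [1.0, 2.0, 3.0]])), axis=1))  # [1. 1.]: vmap maps softmax over rows

Note that jnp.exp(x) overflows for large inputs; subtracting jnp.max(x) before exponentiating is the usual stabilization if these functions graduate into src/functional.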
notebooks/model.ipynb CHANGED
@@ -2,7 +2,7 @@
  "cells": [
   {
    "cell_type": "code",
-   "execution_count": 1,
+   "execution_count": 3,
    "metadata": {
     "collapsed": true,
     "pycharm": {
@@ -14,15 +14,82 @@
     "import random\n",
     "\n",
     "import numpy as np\n",
-    "from jax import numpy as jnp"
+    "from src.functional import sigmoid\n",
+    "\n",
+    "from jax import grad\n"
    ]
   },
   {
    "cell_type": "code",
-   "execution_count": null,
+   "execution_count": 106,
+   "outputs": [],
+   "source": [
+    "class DenseLayer:\n",
+    "    def __init__(self, total_nodes, input_size, activation=sigmoid):\n",
+    "        self.total_nodes = total_nodes\n",
+    "        self.input_size = input_size\n",
+    "        self.weights = np.random.rand(total_nodes, self.input_size + 1)\n",
+    "        self.activation = activation\n",
+    "\n",
+    "    def forward(self, x):\n",
+    "        x_biased = np.concatenate((x, np.ones((len(x), 1))), axis=1)\n",
+    "        y = self.weights @ x_biased.T\n",
+    "        return self.activation(y)\n",
+    "\n",
+    "    def backprop(self, gradient):\n",
+    "        pass  # backward pass not implemented yet"
+   ],
+   "metadata": {
+    "collapsed": false,
+    "pycharm": {
+     "name": "#%%\n"
+    }
+   }
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 107,
+   "outputs": [],
+   "source": [
+    "l = DenseLayer(5, 5)"
+   ],
+   "metadata": {
+    "collapsed": false,
+    "pycharm": {
+     "name": "#%%\n"
+    }
+   }
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 108,
+   "outputs": [
+    {
+     "data": {
+      "text/plain": "Array([[0.7291562 ],\n [0.84321564],\n [0.8657799 ],\n [0.8525716 ],\n [0.89164424]], dtype=float32)"
+     },
+     "execution_count": 108,
+     "metadata": {},
+     "output_type": "execute_result"
+    }
+   ],
+   "source": [
+    "l.forward(np.random.rand(1, 5))"
+   ],
+   "metadata": {
+    "collapsed": false,
+    "pycharm": {
+     "name": "#%%\n"
+    }
+   }
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 2,
    "outputs": [],
    "source": [
-    "class"
+    "import torch\n",
+    "from torch import nn"
    ],
    "metadata": {
     "collapsed": false,
@@ -30,6 +97,72 @@
      "name": "#%%\n"
     }
    }
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 4,
+   "outputs": [],
+   "source": [
+    "class CNN(nn.Module):\n",
+    "    def __init__(self, input_channels, num_classes):\n",
+    "        super().__init__()\n",
+    "\n",
+    "        self.feature_layers = [input_channels, 6, 16, 120]\n",
+    "        self.kernels = [5, 5, 5]\n",
+    "        self.pools = [2, 2]\n",
+    "        self.feature_activations = [nn.Tanh for _ in range(len(self.feature_layers) - 1)]\n",
+    "\n",
+    "        self.classifier_layers = [120, num_classes]\n",
+    "        self.classifier_activations = [nn.Tanh for _ in range(len(self.classifier_layers) - 1)]\n",
+    "\n",
+    "        feature_layers = []\n",
+    "        for idx, layer in enumerate(zip(self.feature_layers[:-1], self.feature_layers[1:])):\n",
+    "            feature_layers.append(\n",
+    "                nn.Conv2d(in_channels=layer[0], out_channels=layer[1], kernel_size=self.kernels[idx])\n",
+    "            )\n",
+    "            feature_layers.append(self.feature_activations[idx]())\n",
+    "\n",
+    "            if idx < len(self.pools):\n",
+    "                feature_layers.append(nn.MaxPool2d(kernel_size=self.pools[idx]))\n",
+    "\n",
+    "\n",
+    "        classifier_layers = []\n",
+    "        for idx, layer in enumerate(zip(self.classifier_layers[:-1], self.classifier_layers[1:])):\n",
+    "            classifier_layers.append(\n",
+    "                nn.Linear(in_features=layer[0], out_features=layer[1])\n",
+    "            )\n",
+    "\n",
+    "            if idx < len(self.classifier_activations) - 1:\n",
+    "                classifier_layers.append(self.classifier_activations[idx]())\n",
+    "\n",
+    "\n",
+    "        self.feature_extractor = nn.Sequential(*feature_layers)\n",
+    "        self.classifier = nn.Sequential(*classifier_layers)\n",
+    "\n",
+    "    def forward(self, x):\n",
+    "        x = self.feature_extractor(x)\n",
+    "        y = self.classifier(torch.flatten(x, 1))\n",
+    "        p = nn.functional.softmax(y, dim=1)\n",
+    "        return y, p"
+   ],
+   "metadata": {
+    "collapsed": false,
+    "pycharm": {
+     "name": "#%%\n"
+    }
+   }
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "outputs": [],
+   "source": [],
+   "metadata": {
+    "collapsed": false,
+    "pycharm": {
+     "name": "#%%\n"
+    }
+   }
   }
  ],
 "metadata": {
src/models.py ADDED
@@ -0,0 +1,45 @@
+#!/usr/bin/env python
+# coding: utf-8
+import torch
+from torch import nn
+
+
+class CNN(nn.Module):
+    """LeNet-style CNN: a conv/pool feature extractor followed by a dense classifier."""
+
+    def __init__(self, input_channels, num_classes):
+        super().__init__()
+
+        self.feature_layers = [input_channels, 6, 16]
+        self.kernels = [5, 5]
+        self.pools = [2, 2]
+        self.feature_activations = [nn.ReLU for _ in range(len(self.feature_layers) - 1)]
+
+        self.classifier_layers = [400, 120, 84, num_classes]
+        self.classifier_activations = [nn.ReLU for _ in range(len(self.classifier_layers) - 1)]
+
+        feature_layers = []
+        for idx, layer in enumerate(zip(self.feature_layers[:-1], self.feature_layers[1:])):
+            feature_layers.append(
+                nn.Conv2d(in_channels=layer[0], out_channels=layer[1],
+                          kernel_size=self.kernels[idx], padding=2 if idx == 0 else 0)
+            )
+            feature_layers.append(self.feature_activations[idx]())
+            feature_layers.append(nn.MaxPool2d(kernel_size=self.pools[idx]))
+
+        classifier_layers = []
+        for idx, layer in enumerate(zip(self.classifier_layers[:-1], self.classifier_layers[1:])):
+            classifier_layers.append(
+                nn.Linear(in_features=layer[0], out_features=layer[1])
+            )
+            if idx < len(self.classifier_activations) - 1:  # keep the output logits unactivated
+                classifier_layers.append(self.classifier_activations[idx]())
+
+        self.feature_extractor = nn.Sequential(*feature_layers)
+        self.classifier = nn.Sequential(*classifier_layers)
+
+    def forward(self, x):
+        x = self.feature_extractor(x)
+        # flatten all dimensions except the batch dimension before the dense layers
+        y = self.classifier(torch.flatten(x, 1))
+        return y
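A usage sketch for the committed module (not part of the diff); with padding=2 on the first convolution, MNIST-shaped 28x28 inputs flatten to exactly the 400 features the classifier expects:

    import torch
    from src.models import CNN

    model = CNN(input_channels=1, num_classes=10)
    x = torch.rand(8, 1, 28, 28)  # MNIST-sized batch
    logits = model(x)
    print(logits.shape)           # torch.Size([8, 10])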