Spaces: Runtime error
carlfeynman committed · Commit 53075d2 · Parent(s): 0bbec58

ipynb to py conversion process added

Files changed:
- mnist.ipynb (+26, -7)
- mnist.py (+1, -145)
mnist.ipynb CHANGED
@@ -145,6 +145,7 @@
    "metadata": {},
    "outputs": [],
    "source": [
+    "# model definition\n",
     "def linear_classifier():\n",
     "    return nn.Sequential(\n",
     "        Reshape((-1, 784)),\n",
@@ -209,7 +210,11 @@
   {
    "cell_type": "code",
    "execution_count": 31,
-   "metadata": {},
+   "metadata": {
+    "tags": [
+     "exclude"
+    ]
+   },
    "outputs": [],
    "source": [
     "# with open('./linear_classifier.pkl', 'wb') as model_file:\n",
@@ -218,33 +223,47 @@
   },
   {
    "cell_type": "markdown",
-   "metadata": {},
+   "metadata": {
+    "tags": [
+     "exclude"
+    ]
+   },
    "source": [
     "#### commit to .py file for deployment"
    ]
   },
   {
    "cell_type": "code",
-   "execution_count":
-   "metadata": {},
+   "execution_count": 43,
+   "metadata": {
+    "tags": [
+     "exclude"
+    ]
+   },
    "outputs": [
     {
      "name": "stdout",
      "output_type": "stream",
      "text": [
       "[NbConvertApp] Converting notebook mnist.ipynb to script\n",
-      "[NbConvertApp] Writing
+      "[NbConvertApp] Writing 3123 bytes to mnist.py\n"
      ]
     }
    ],
    "source": [
-    "!jupyter nbconvert --to script mnist.ipynb"
+    "# !jupyter nbconvert --to script mnist.ipynb\n",
+    "!jupyter nbconvert --to script --TagRemovePreprocessor.remove_cell_tags=\"exclude\" --TemplateExporter.exclude_input_prompt=True mnist.ipynb\n",
+    "\n"
    ]
   },
   {
    "cell_type": "code",
    "execution_count": null,
-   "metadata": {},
+   "metadata": {
+    "tags": [
+     "exclude"
+    ]
+   },
    "outputs": [],
    "source": [
     "# from IPython.display import HTML, display, Image\n",
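In this notebook change, the cells that should not ship to the deployed script (the pickling scratch cell, the conversion cell itself, and the commented-out HTML/JS whiteboard cell) get an "exclude" tag in their metadata, and the conversion command becomes !jupyter nbconvert --to script --TagRemovePreprocessor.remove_cell_tags="exclude" --TemplateExporter.exclude_input_prompt=True mnist.ipynb, which also drops the "# In[N]:" prompts. The same conversion can be driven from Python through nbconvert's exporter API; the sketch below is illustrative only, not part of the commit, and assumes nbconvert is installed (file names match the repo, everything else is assumption).

    # Sketch: programmatic equivalent of the tag-aware nbconvert command above.
    # Not part of the committed code.
    from traitlets.config import Config
    from nbconvert import PythonExporter
    from nbconvert.preprocessors import TagRemovePreprocessor

    cfg = Config()
    cfg.TagRemovePreprocessor.remove_cell_tags = ("exclude",)  # drop cells tagged "exclude"
    cfg.TagRemovePreprocessor.enabled = True
    cfg.TemplateExporter.exclude_input_prompt = True           # no "# In[N]:" markers

    exporter = PythonExporter(config=cfg)
    exporter.register_preprocessor(TagRemovePreprocessor(config=cfg), enabled=True)

    script_source, _ = exporter.from_filename("mnist.ipynb")
    with open("mnist.py", "w") as f:
        f.write(script_source)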
mnist.py CHANGED
@@ -1,9 +1,6 @@
 #!/usr/bin/env python
 # coding: utf-8
 
-# In[1]:
-
-
 import torch
 from torch import nn
 import torch.nn.functional as F
@@ -19,17 +16,11 @@ get_ipython().run_line_magic('matplotlib', 'inline')
 plt.rcParams['figure.figsize'] = [2, 2]
 
 
-# In[2]:
-
-
 dataset_nm = 'mnist'
 x,y = 'image', 'label'
 ds = load_dataset(dataset_nm)
 
 
-# In[3]:
-
-
 def transform_ds(b):
     b[x] = [TF.to_tensor(ele) for ele in b[x]]
     return b
@@ -38,9 +29,6 @@ dst = ds.with_transform(transform_ds)
 plt.imshow(dst['train'][0]['image'].permute(1,2,0));
 
 
-# In[4]:
-
-
 bs = 1024
 class DataLoaders:
     def __init__(self, train_ds, valid_ds, bs, collate_fn, **kwargs):
@@ -56,9 +44,6 @@ xb,yb = next(iter(dls.train))
 xb.shape, yb.shape
 
 
-# In[5]:
-
-
 class Reshape(nn.Module):
     def __init__(self, dim):
         super().__init__()
@@ -68,9 +53,6 @@ class Reshape(nn.Module):
         return x.reshape(self.dim)
 
 
-# In[6]:
-
-
 def cnn_classifier():
     ks,stride = 3,2
     return nn.Sequential(
@@ -89,9 +71,7 @@ def cnn_classifier():
     )
 
 
-# In[7]:
-
-
+# model definition
 def linear_classifier():
     return nn.Sequential(
         Reshape((-1, 784)),
@@ -103,9 +83,6 @@ def linear_classifier():
     )
 
 
-# In[8]:
-
-
 model = linear_classifier()
 lr = 0.1
 max_lr = 0.1
@@ -133,128 +110,7 @@ for epoch in range(epochs):
 
 
 
-# In[31]:
-
-
-# with open('./linear_classifier.pkl', 'wb') as model_file:
-#     pickle.dump(model, model_file)
-
-
-# #### commit to .py file for deployment
-
-# In[32]:
-
-
-get_ipython().system('jupyter nbconvert --to script mnist.ipynb')
-
-
-# In[29]:
-
-
-get_ipython().system('pip3 install markupsafe')
-
-
-
-# In[ ]:
-
-
-# from IPython.display import HTML, display, Image
-# %%html
-# <style>
-# #whiteboard {
-#     border: 3px solid black;
-#     border-radius: 6px;
-#     background-color: #FFFFFF;
-# }
-# #capture-button {
-#     background-color: #3F52D9;
-#     color: white;
-#     border: none;
-#     padding: 10px 20px;
-#     cursor: pointer;
-#     font-size: 16px;
-#     border-radius: 3px;
-#     margin-top: 10px;
-#     width: 190px;
-#     margin-right: 20px;
-# }
-# #clear-button {
-#     background-color: #FF0000,;
-#     color: black;
-#     border: none;
-#     padding: 10px 20px;
-#     cursor: pointer;
-#     font-size: 16px;
-#     border-radius: 3px;
-#     margin-top: 10px;
-#     width: 190px;
-# }
-# #container {
-#     display: flex;
-#     flex-direction: column; /* Arrange children vertically */
-#     align-items: center; /* Center horizontally */
-#     justify-content: center;
-# }
-# #btn-container {
-#     display: flex;
-#     flex-direction: row; /* Arrange children vertically */
-#     align-items: center; /* Center horizontally */
-# }
-
-# </style>
-# <div id='container'>
-# <canvas id="whiteboard" width="400" height="200" fill_rect='white'></canvas>
-# <div id='btn-container'>
-# <button id="capture-button">Predict</button>
-# <button id="clear-button">Clear</button>
-# </div>
-
-# </div>
-# <script>
-#     var canvas = document.getElementById('whiteboard');
-#     var context = canvas.getContext('2d');
-#     var drawing = false;
-#     canvas.addEventListener('mousedown', function (e) {
-#         drawing = true;
-#         context.beginPath();
-#         context.moveTo(e.clientX - canvas.getBoundingClientRect().left, e.clientY - canvas.getBoundingClientRect().top);
-#     });
-#     canvas.addEventListener('mousemove', function (e) {
-#         if (drawing) {
-#             context.lineTo(e.clientX - canvas.getBoundingClientRect().left, e.clientY - canvas.getBoundingClientRect().top);
-#             context.stroke();
-#         }
-#     });
-#     canvas.addEventListener('mouseup', function () {
-#         drawing = false;
-#     });
-#     canvas.addEventListener('mouseout', function () {
-#         drawing = false;
-#     });
-
-#     var clearButton = document.getElementById('clear-button');
-#     clearButton.addEventListener('click', function () {
-#         context.clearRect(0, 0, canvas.width, canvas.height);
-#     });
-
-#     var captureButton = document.getElementById('capture-button');
-#     captureButton.addEventListener('click', function () {
-#         // Convert the canvas content to a data URL (image)
-#         var imageData = canvas.toDataURL("image/png");
-
-#         // Send the image data to the Jupyter kernel variable
-#         IPython.notebook.kernel.execute('image_data = "' + imageData + '"');
-#     });
-# </script>
-
-
-# In[ ]:
-
-
-
-
 
-# In[ ]:
 
 
 
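The mnist.py diff above shows the effect of the tag-aware conversion: the "# In[N]:" execution markers and every cell tagged "exclude" (pickling, the nbconvert call, the pip install, and the whiteboard HTML/JS block) disappear from the generated script, leaving only the data, model, and training code plus the new "# model definition" comment. The tags themselves live in cell metadata, as the mnist.ipynb hunks show; if you wanted to apply the same tag in code rather than through the Jupyter UI, an nbformat script along these lines would work. The cell-selection rule below is purely hypothetical and is not how this commit added the tags.

    # Sketch: tag deployment-only cells with "exclude" using nbformat.
    # The matching rule is an assumption; the commit set the tags via cell metadata directly.
    import nbformat

    nb = nbformat.read("mnist.ipynb", as_version=4)
    markers = ("nbconvert", "pickle.dump", "IPython.display")  # assumed selection rule

    for cell in nb.cells:
        if any(m in cell.source for m in markers):
            tags = cell.metadata.setdefault("tags", [])
            if "exclude" not in tags:
                tags.append("exclude")

    nbformat.write(nb, "mnist.ipynb")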