Re-export app.ipynb
Files changed:
- app.ipynb   +36 -110
- app.py      +2 -1
- app/app.py  +30 -0
app.ipynb
CHANGED
@@ -2,7 +2,7 @@
  "cells": [
   {
    "cell_type": "code",
-   "execution_count":
+   "execution_count": 3,
    "id": "6c6b9b04",
    "metadata": {},
    "outputs": [],
@@ -20,7 +20,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count":
+   "execution_count": 4,
    "id": "acbde237",
    "metadata": {},
    "outputs": [],
@@ -36,7 +36,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count":
+   "execution_count": 5,
    "id": "80b17561",
    "metadata": {},
    "outputs": [
@@ -47,7 +47,7 @@
       "PILImage mode=RGB size=192x191"
      ]
     },
-    "execution_count":
+    "execution_count": 5,
     "metadata": {},
     "output_type": "execute_result"
    }
@@ -60,7 +60,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count":
+   "execution_count": 6,
    "id": "a420c99a",
    "metadata": {},
    "outputs": [],
@@ -71,7 +71,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count":
+   "execution_count": 7,
    "id": "034717d6",
    "metadata": {},
    "outputs": [
@@ -118,7 +118,7 @@
       "('False', tensor(0), tensor([9.9960e-01, 4.0038e-04]))"
      ]
     },
-    "execution_count":
+    "execution_count": 7,
     "metadata": {},
     "output_type": "execute_result"
    }
@@ -129,7 +129,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count":
+   "execution_count": 8,
    "id": "a95b779b",
    "metadata": {},
    "outputs": [],
@@ -144,7 +144,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count":
+   "execution_count": 9,
    "id": "ca6899b3",
    "metadata": {},
    "outputs": [
@@ -191,7 +191,7 @@
       "{'Dog': 0.9995996356010437, 'Cat': 0.00040037668077275157}"
      ]
     },
-    "execution_count":
+    "execution_count": 9,
     "metadata": {},
     "output_type": "execute_result"
    }
@@ -202,7 +202,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count":
+   "execution_count": 10,
    "id": "0990842c",
    "metadata": {},
    "outputs": [
@@ -224,7 +224,7 @@
      "name": "stdout",
      "output_type": "stream",
      "text": [
-      "Running on local URL: http://127.0.0.1:
+      "Running on local URL: http://127.0.0.1:7860\n",
       "\n",
       "To create a public link, set `share=True` in `launch()`.\n"
      ]
@@ -233,83 +233,33 @@
     "data": {
      "text/plain": []
     },
-    "execution_count":
+    "execution_count": 10,
     "metadata": {},
     "output_type": "execute_result"
-   }
-   [… 17 deleted lines not rendered in the extracted diff view …]
-     " }\n",
-     "</style>\n"
-    ],
-    "text/plain": [
-     "<IPython.core.display.HTML object>"
-    ]
-   },
-   "metadata": {},
-   "output_type": "display_data"
-  },
-  {
-   "data": {
-    "text/html": [],
-    "text/plain": [
-     "<IPython.core.display.HTML object>"
-    ]
-   },
-   "metadata": {},
-   "output_type": "display_data"
-  },
-  {
-   "data": {
-    "text/html": [
-     "\n",
-     "<style>\n",
-     "    /* Turns off some styling */\n",
-     "    progress {\n",
-     "        /* gets rid of default border in Firefox and Opera. */\n",
-     "        border: none;\n",
-     "        /* Needs to be in here for Safari polyfill so background images work as expected. */\n",
-     "        background-size: auto;\n",
-     "    }\n",
-     "    progress:not([value]), progress:not([value])::-webkit-progress-bar {\n",
-     "        background: repeating-linear-gradient(45deg, #7e7e7e, #7e7e7e 10px, #5c5c5c 10px, #5c5c5c 20px);\n",
-     "    }\n",
-     "    .progress-bar-interrupted, .progress-bar-interrupted::-webkit-progress-bar {\n",
-     "        background: #F44336;\n",
-     "    }\n",
-     "</style>\n"
-    ],
-    "text/plain": [
-     "<IPython.core.display.HTML object>"
-    ]
-   },
-   "metadata": {},
-   "output_type": "display_data"
-  },
+   }
+  ],
+  "source": [
+   "#|export\n",
+   "image = gr.inputs.Image(shape=(192, 192))\n",
+   "label = gr.outputs.Label()\n",
+   "examples = ['dog.jpeg', 'cat.jpeg', 'ooconfuse.jpeg']\n",
+   "\n",
+   "intf = gr.Interface(fn=classify_image, inputs=image, outputs=label, examples=examples)\n",
+   "intf.launch(inline=False)"
+  ]
+ },
+ {
+  "cell_type": "code",
+  "execution_count": 11,
+  "id": "4b9e1641",
+  "metadata": {},
+  "outputs": [
   {
-   [… a deleted output's data block, truncated in the extracted diff view …]
-   },
-   "metadata": {},
-   "output_type": "display_data"
+    "name": "stdout",
+    "output_type": "stream",
+    "text": [
+     "Export successful\n"
+    ]
   },
   {
    "data": {
@@ -349,30 +299,6 @@
    "output_type": "display_data"
   }
  ],
-  "source": [
-   "#|export\n",
-   "image = gr.inputs.Image(shape=(192, 192))\n",
-   "label = gr.outputs.Label()\n",
-   "examples = ['dog.jpeg', 'cat.jpeg', 'ooconfuse.jpeg']\n",
-   "\n",
-   "intf = gr.Interface(fn=classify_image, inputs=image, outputs=label, examples=examples)\n",
-   "intf.launch(inline=False)"
-  ]
- },
- {
-  "cell_type": "code",
-  "execution_count": 2,
-  "id": "4b9e1641",
-  "metadata": {},
-  "outputs": [
-   {
-    "name": "stdout",
-    "output_type": "stream",
-    "text": [
-     "Export successful\n"
-    ]
-   }
-  ],
  "source": [
   "import nbdev\n",
  "nbdev.export.nb_export('app.ipynb', 'app')\n",
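The final hunk in app.ipynb is the re-export step itself: nbdev collects every cell tagged `#|export` and writes them out as `app/app.py`. A minimal sketch of that step, assuming nbdev 2.x is installed and the call runs from the directory containing app.ipynb:

    # Regenerate app/app.py from the "#|export" cells of app.ipynb.
    # Sketch only; assumes nbdev >= 2 and app.ipynb in the working directory.
    import nbdev

    nbdev.export.nb_export('app.ipynb', 'app')  # writes app/app.py
    print('Export successful')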
app.py
CHANGED
@@ -1,9 +1,10 @@
 # AUTOGENERATED! DO NOT EDIT! File to edit: ../app.ipynb.

 # %% auto 0
-__all__ = ['learn', 'categories', 'image', 'label', 'examples', 'intf', '
+__all__ = ['learn', 'categories', 'image', 'label', 'examples', 'intf', 'classify_image']

 # %% ../app.ipynb 2
+!pip install -Uqq fastai
 from fastai.vision.all import *
 import gradio as gr

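One caveat with this change: `!pip install -Uqq fastai` is IPython shell syntax, and because its cell is exported it lands verbatim in the generated .py files, where it is not valid Python. If the install really has to live in the module, a guarded subprocess call would keep the file importable; this is an alternative sketch, not what the commit does (on Spaces the usual route is a requirements.txt entry instead):

    # Hypothetical guarded install that stays valid in a plain .py module
    # (an alternative to the exported "!pip install" line, not part of this commit).
    import importlib.util
    import subprocess
    import sys

    if importlib.util.find_spec('fastai') is None:
        subprocess.run([sys.executable, '-m', 'pip', 'install', '-Uqq', 'fastai'], check=True)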
app/app.py
ADDED
@@ -0,0 +1,30 @@
+# AUTOGENERATED! DO NOT EDIT! File to edit: ../app.ipynb.
+
+# %% auto 0
+__all__ = ['learn', 'categories', 'image', 'label', 'examples', 'intf', 'classify_image']
+
+# %% ../app.ipynb 2
+!pip install -Uqq fastai
+from fastai.vision.all import *
+import gradio as gr
+
+def is_cat(x):
+    return x[0].isupper()
+
+# %% ../app.ipynb 4
+learn = load_learner('model.pkl')
+
+# %% ../app.ipynb 6
+categories = ('Dog', 'Cat')
+
+def classify_image(img):
+    pred,idx,probs = learn.predict(img)
+    return dict(zip(categories, map(float, probs)))
+
+# %% ../app.ipynb 8
+image = gr.inputs.Image(shape=(192, 192))
+label = gr.outputs.Label()
+examples = ['dog.jpeg', 'cat.jpeg', 'ooconfuse.jpeg']
+
+intf = gr.Interface(fn=classify_image, inputs=image, outputs=label, examples=examples)
+intf.launch(inline=False)
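For reference, `learn.predict` returns a (decoded label, class index, probability tensor) triple, which `classify_image` zips into the {'Dog': …, 'Cat': …} dictionary seen in the notebook output. A small usage sketch, assuming dog.jpeg sits next to the script:

    # Usage sketch (assumes dog.jpeg is present alongside app.py).
    from fastai.vision.all import PILImage

    img = PILImage.create('dog.jpeg')
    print(classify_image(img))  # e.g. {'Dog': 0.9996, 'Cat': 0.0004}

Also note that `gr.inputs.Image` and `gr.outputs.Label` are Gradio's old component namespaces, which newer Gradio releases have removed. Roughly the equivalent on a current release would be the following; this is a sketch under that assumption, not part of the committed code:

    # Same interface with current Gradio components (assumes Gradio >= 4,
    # where gr.inputs/gr.outputs no longer exist). Not part of this commit.
    import gradio as gr

    image = gr.Image(width=192, height=192)  # replaces gr.inputs.Image(shape=(192, 192))
    label = gr.Label()                       # replaces gr.outputs.Label()
    examples = ['dog.jpeg', 'cat.jpeg', 'ooconfuse.jpeg']

    intf = gr.Interface(fn=classify_image, inputs=image, outputs=label, examples=examples)
    intf.launch(inline=False)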