Ali-Maq committed on
Commit
d51e88d
β€’
1 Parent(s): bb5ed85

Upload 6 files

Files changed (6)
  1. Gradio.pdf +0 -0
  2. Gradio2.pdf +0 -0
  3. Gradio3.pdf +0 -0
  4. Gradio4.pdf +0 -0
  5. finalapp.ipynb +274 -0
  6. finalapp.py +274 -0
Gradio.pdf ADDED
Binary file (137 kB).
 
Gradio2.pdf ADDED
Binary file (97 kB).
 
Gradio3.pdf ADDED
Binary file (77.7 kB).
 
Gradio4.pdf ADDED
Binary file (60.5 kB).
 
finalapp.ipynb ADDED
@@ -0,0 +1,274 @@
+ {
+ "cells": [
+ {
+ "cell_type": "code",
+ "execution_count": 19,
+ "metadata": {
+ "collapsed": true
+ },
+ "outputs": [],
+ "source": [
+ "import numpy as np\n",
+ "import gradio as gr\n",
+ "import requests\n",
+ "import json"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 20,
+ "outputs": [],
+ "source": [
+ "def list_to_dict(data):\n",
+ "    results = {}\n",
+ "\n",
+ "    for i in range(len(data)):\n",
+ "        # Access the i-th dictionary in the list using an integer index\n",
+ "        d = data[i]\n",
+ "        # Assign the value of the 'label' key to the 'score' value in the results dictionary\n",
+ "        results[d['label']] = d['score']\n",
+ "\n",
+ "    # The results dictionary will now contain the label-score pairs from the data list\n",
+ "    return results"
+ ],
+ "metadata": {
+ "collapsed": false
+ }
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 21,
+ "outputs": [],
+ "source": [
+ "\n",
+ "\n",
+ "API_URL = \"https://api-inference.huggingface.co/models/nateraw/food\"\n",
+ "headers = {\"Authorization\": \"Bearer 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\"}\n",
+ "\n",
+ "def query(filename):\n",
+ "    with open(filename, \"rb\") as f:\n",
+ "        data = f.read()\n",
+ "    response = requests.request(\"POST\", API_URL, headers=headers, data=data)\n",
+ "    output = json.loads(response.content.decode(\"utf-8\"))\n",
+ "    return list_to_dict(output), json.dumps(output, indent=2, sort_keys=True)"
+ ],
+ "metadata": {
+ "collapsed": false
+ }
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 27,
+ "outputs": [],
+ "source": [
+ "def get_nutrition_info(food_name):\n",
+ "    # Make a request to the Nutritionix API\n",
+ "    response = requests.get(\n",
+ "        \"https://trackapi.nutritionix.com/v2/search/instant\",\n",
+ "        params={\"query\": food_name},\n",
+ "        headers={\n",
+ "            \"x-app-id\": \"63a710ef\",\n",
+ "            \"x-app-key\": \"3ddc7e3feda88e1cf6dd355fb26cb261\"\n",
+ "        }\n",
+ "    )\n",
+ "    # Parse the response and return the relevant information\n",
+ "    data = response.json()\n",
+ "    response = data[\"branded\"][0][\"photo\"][\"thumb\"]\n",
+ "\n",
+ "    # Open the image using PIL\n",
+ "\n",
+ "    return {\n",
+ "        \"food_name\": data[\"branded\"][0][\"food_name\"],\n",
+ "        \"calories\": data[\"branded\"][0][\"nf_calories\"],\n",
+ "        \"serving_size\": data[\"branded\"][0][\"serving_qty\"],\n",
+ "        \"serving_unit\": data[\"branded\"][0][\"serving_unit\"],\n",
+ "        # \"images\": data[\"branded\"][0][\"photo\"]\n",
+ "    }, response"
+ ],
+ "metadata": {
+ "collapsed": false
+ }
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 28,
+ "outputs": [
+ {
+ "data": {
+ "text/plain": "({'food_name': 'Hamburger',\n 'calories': 340,\n 'serving_size': 1,\n 'serving_unit': 'sandwich'},\n 'https://d2eawub7utcl6.cloudfront.net/images/nix-apple-grey.png')"
+ },
+ "execution_count": 28,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "get_nutrition_info(\"Hamburger\")"
+ ],
+ "metadata": {
+ "collapsed": false
+ }
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 22,
+ "outputs": [],
+ "source": [],
+ "metadata": {
+ "collapsed": false
+ }
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 22,
+ "outputs": [],
+ "source": [],
+ "metadata": {
+ "collapsed": false
+ }
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Running on local URL: http://127.0.0.1:7869\n",
+ "Running on public URL: https://f7f1e48778aede65.gradio.app\n",
+ "\n",
+ "This share link expires in 72 hours. For free permanent hosting and GPU upgrades (NEW!), check out Spaces: https://huggingface.co/spaces\n"
+ ]
+ },
+ {
+ "data": {
+ "text/plain": "<IPython.core.display.HTML object>",
+ "text/html": "<div><iframe src=\"https://f7f1e48778aede65.gradio.app\" width=\"100%\" height=\"500\" allow=\"autoplay; camera; microphone; clipboard-read; clipboard-write;\" frameborder=\"0\" allowfullscreen></iframe></div>"
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ }
+ ],
+ "source": [
+ "with gr.Blocks() as demo:\n",
+ "    gr.Markdown(\"Food Classification, Calorie Estimation and Volume Estimation\")\n",
+ "    with gr.Tab(\"Food Classification\"):\n",
+ "        text_input = gr.Image(type=\"filepath\")\n",
+ "        text_output = [gr.Label(num_top_classes=6),\n",
+ "                       gr.Textbox()\n",
+ "                       ]\n",
+ "        text_button = gr.Button(\"Food Classification\")\n",
+ "    with gr.Tab(\"Food Calorie Estimation\"):\n",
+ "        image_input = gr.Textbox(label=\"Please enter the name of the food you want to get calories for\")\n",
+ "        image_output = [gr.Textbox(),\n",
+ "                        gr.Image(type=\"filepath\")\n",
+ "                        ]\n",
+ "        image_button = gr.Button(\"Estimate Calories!\")\n",
+ "    with gr.Tab(\"Volume Estimation\"):\n",
+ "        _image_input = gr.Textbox(label=\"Please enter the name of the food you want to get calories for\")\n",
+ "        _image_output = [gr.Textbox(),\n",
+ "                         gr.Image()\n",
+ "                         ]\n",
+ "        _image_button = gr.Button(\"Volume Calculation\")\n",
+ "    with gr.Tab(\"Future Works\"):\n",
+ "        gr.Markdown(\"Future work on Food Classification\")\n",
+ "        gr.Markdown(\n",
+ "            \"Currently the model is trained on the Food-101 dataset, which has 101 classes. In a future iteration of the project we would like to train the model on the UNIMIB dataset with 256 food classes.\")\n",
+ "        gr.Markdown(\"Future work on Volume Estimation\")\n",
+ "        gr.Markdown(\n",
+ "            \"The volume model has been trained with the Apple AR Toolkit and can therefore be executed only on Apple devices, i.e. the iOS platform. In the future we would like to make the volume model platform independent.\")\n",
+ "        gr.Markdown(\"Future work on Calorie Estimation\")\n",
+ "        gr.Markdown(\n",
+ "            \"The calorie estimation currently relies on the Nutritionix API. In a future iteration we would like to build our own custom database of major food products across New York restaurants.\")\n",
+ "        gr.Markdown(\"https://github.com/Ali-Maq/Food-Classification-Volume-Estimation-and-Calorie-Estimation/blob/main/README.md\")\n",
+ "\n",
+ "    text_button.click(query, inputs=text_input, outputs=text_output)\n",
+ "    image_button.click(get_nutrition_info, inputs=image_input, outputs=image_output)\n",
+ "    _image_button.click(get_nutrition_info, inputs=_image_input, outputs=_image_output)\n",
+ "    with gr.Accordion(\"Open for More!\"):\n",
+ "        gr.Markdown(\"🍎 Designed and built by Ali under the guidance of Professor Dennis Shasha\")\n",
+ "        gr.Markdown(\"Contact me at ali.quidwai@nyu.edu 😊\")\n",
+ "\n",
+ "demo.launch(share=True, debug=True)"
+ ],
+ "metadata": {
+ "collapsed": false,
+ "pycharm": {
+ "is_executing": true
+ }
+ }
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "outputs": [],
+ "source": [
+ "import numpy as np\n",
+ "import gradio as gr\n",
+ "\n",
+ "def flip_text(x):\n",
+ "    return x[::-1]\n",
+ "\n",
+ "def flip_image(x):\n",
+ "    return np.fliplr(x)\n",
+ "\n",
+ "with gr.Blocks() as demo:\n",
+ "    gr.Markdown(\"Flip text or image files using this demo.\")\n",
+ "    with gr.Tab(\"Flip Text\"):\n",
+ "        text_input = gr.Textbox()\n",
+ "        text_output = gr.Textbox()\n",
+ "        text_button = gr.Button(\"Flip\")\n",
+ "    with gr.Tab(\"Flip Image\"):\n",
+ "        with gr.Row():\n",
+ "            image_input = gr.Image()\n",
+ "            image_output = gr.Image()\n",
+ "        image_button = gr.Button(\"Flip\")\n",
+ "\n",
+ "    with gr.Accordion(\"Open for More!\"):\n",
+ "        gr.Markdown(\"Look at me...\")\n",
+ "\n",
+ "    text_button.click(flip_text, inputs=text_input, outputs=text_output)\n",
+ "    image_button.click(flip_image, inputs=image_input, outputs=image_output)\n",
+ "\n",
+ "demo.launch()"
+ ],
+ "metadata": {
+ "collapsed": false,
+ "pycharm": {
+ "is_executing": true
+ }
+ }
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "outputs": [],
+ "source": [],
+ "metadata": {
+ "collapsed": false
+ }
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python 3",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 2
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython2",
+ "version": "2.7.6"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 0
+ }
finalapp.py ADDED
@@ -0,0 +1,274 @@
+ {
+ "cells": [
+ {
+ "cell_type": "code",
+ "execution_count": 19,
+ "metadata": {
+ "collapsed": true
+ },
+ "outputs": [],
+ "source": [
+ "import numpy as np\n",
+ "import gradio as gr\n",
+ "import requests\n",
+ "import json"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 20,
+ "outputs": [],
+ "source": [
+ "def list_to_dict(data):\n",
+ "    results = {}\n",
+ "\n",
+ "    for i in range(len(data)):\n",
+ "        # Access the i-th dictionary in the list using an integer index\n",
+ "        d = data[i]\n",
+ "        # Assign the value of the 'label' key to the 'score' value in the results dictionary\n",
+ "        results[d['label']] = d['score']\n",
+ "\n",
+ "    # The results dictionary will now contain the label-score pairs from the data list\n",
+ "    return results"
+ ],
+ "metadata": {
+ "collapsed": false
+ }
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 21,
+ "outputs": [],
+ "source": [
+ "\n",
+ "\n",
+ "API_URL = \"https://api-inference.huggingface.co/models/nateraw/food\"\n",
+ "headers = {\"Authorization\": \"Bearer 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\"}\n",
+ "\n",
+ "def query(filename):\n",
+ "    with open(filename, \"rb\") as f:\n",
+ "        data = f.read()\n",
+ "    response = requests.request(\"POST\", API_URL, headers=headers, data=data)\n",
+ "    output = json.loads(response.content.decode(\"utf-8\"))\n",
+ "    return list_to_dict(output), json.dumps(output, indent=2, sort_keys=True)"
+ ],
+ "metadata": {
+ "collapsed": false
+ }
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 27,
+ "outputs": [],
+ "source": [
+ "def get_nutrition_info(food_name):\n",
+ "    # Make a request to the Nutritionix API\n",
+ "    response = requests.get(\n",
+ "        \"https://trackapi.nutritionix.com/v2/search/instant\",\n",
+ "        params={\"query\": food_name},\n",
+ "        headers={\n",
+ "            \"x-app-id\": \"63a710ef\",\n",
+ "            \"x-app-key\": \"3ddc7e3feda88e1cf6dd355fb26cb261\"\n",
+ "        }\n",
+ "    )\n",
+ "    # Parse the response and return the relevant information\n",
+ "    data = response.json()\n",
+ "    response = data[\"branded\"][0][\"photo\"][\"thumb\"]\n",
+ "\n",
+ "    # Open the image using PIL\n",
+ "\n",
+ "    return {\n",
+ "        \"food_name\": data[\"branded\"][0][\"food_name\"],\n",
+ "        \"calories\": data[\"branded\"][0][\"nf_calories\"],\n",
+ "        \"serving_size\": data[\"branded\"][0][\"serving_qty\"],\n",
+ "        \"serving_unit\": data[\"branded\"][0][\"serving_unit\"],\n",
+ "        # \"images\": data[\"branded\"][0][\"photo\"]\n",
+ "    }, response"
+ ],
+ "metadata": {
+ "collapsed": false
+ }
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 28,
+ "outputs": [
+ {
+ "data": {
+ "text/plain": "({'food_name': 'Hamburger',\n 'calories': 340,\n 'serving_size': 1,\n 'serving_unit': 'sandwich'},\n 'https://d2eawub7utcl6.cloudfront.net/images/nix-apple-grey.png')"
+ },
+ "execution_count": 28,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "get_nutrition_info(\"Hamburger\")"
+ ],
+ "metadata": {
+ "collapsed": false
+ }
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 22,
+ "outputs": [],
+ "source": [],
+ "metadata": {
+ "collapsed": false
+ }
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 22,
+ "outputs": [],
+ "source": [],
+ "metadata": {
+ "collapsed": false
+ }
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Running on local URL: http://127.0.0.1:7869\n",
+ "Running on public URL: https://f7f1e48778aede65.gradio.app\n",
+ "\n",
+ "This share link expires in 72 hours. For free permanent hosting and GPU upgrades (NEW!), check out Spaces: https://huggingface.co/spaces\n"
+ ]
+ },
+ {
+ "data": {
+ "text/plain": "<IPython.core.display.HTML object>",
+ "text/html": "<div><iframe src=\"https://f7f1e48778aede65.gradio.app\" width=\"100%\" height=\"500\" allow=\"autoplay; camera; microphone; clipboard-read; clipboard-write;\" frameborder=\"0\" allowfullscreen></iframe></div>"
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ }
+ ],
+ "source": [
+ "with gr.Blocks() as demo:\n",
+ "    gr.Markdown(\"Food Classification, Calorie Estimation and Volume Estimation\")\n",
+ "    with gr.Tab(\"Food Classification\"):\n",
+ "        text_input = gr.Image(type=\"filepath\")\n",
+ "        text_output = [gr.Label(num_top_classes=6),\n",
+ "                       gr.Textbox()\n",
+ "                       ]\n",
+ "        text_button = gr.Button(\"Food Classification\")\n",
+ "    with gr.Tab(\"Food Calorie Estimation\"):\n",
+ "        image_input = gr.Textbox(label=\"Please enter the name of the food you want to get calories for\")\n",
+ "        image_output = [gr.Textbox(),\n",
+ "                        gr.Image(type=\"filepath\")\n",
+ "                        ]\n",
+ "        image_button = gr.Button(\"Estimate Calories!\")\n",
+ "    with gr.Tab(\"Volume Estimation\"):\n",
+ "        _image_input = gr.Textbox(label=\"Please enter the name of the food you want to get calories for\")\n",
+ "        _image_output = [gr.Textbox(),\n",
+ "                         gr.Image()\n",
+ "                         ]\n",
+ "        _image_button = gr.Button(\"Volume Calculation\")\n",
+ "    with gr.Tab(\"Future Works\"):\n",
+ "        gr.Markdown(\"Future work on Food Classification\")\n",
+ "        gr.Markdown(\n",
+ "            \"Currently the model is trained on the Food-101 dataset, which has 101 classes. In a future iteration of the project we would like to train the model on the UNIMIB dataset with 256 food classes.\")\n",
+ "        gr.Markdown(\"Future work on Volume Estimation\")\n",
+ "        gr.Markdown(\n",
+ "            \"The volume model has been trained with the Apple AR Toolkit and can therefore be executed only on Apple devices, i.e. the iOS platform. In the future we would like to make the volume model platform independent.\")\n",
+ "        gr.Markdown(\"Future work on Calorie Estimation\")\n",
+ "        gr.Markdown(\n",
+ "            \"The calorie estimation currently relies on the Nutritionix API. In a future iteration we would like to build our own custom database of major food products across New York restaurants.\")\n",
+ "        gr.Markdown(\"https://github.com/Ali-Maq/Food-Classification-Volume-Estimation-and-Calorie-Estimation/blob/main/README.md\")\n",
+ "\n",
+ "    text_button.click(query, inputs=text_input, outputs=text_output)\n",
+ "    image_button.click(get_nutrition_info, inputs=image_input, outputs=image_output)\n",
+ "    _image_button.click(get_nutrition_info, inputs=_image_input, outputs=_image_output)\n",
+ "    with gr.Accordion(\"Open for More!\"):\n",
+ "        gr.Markdown(\"🍎 Designed and built by Ali under the guidance of Professor Dennis Shasha\")\n",
+ "        gr.Markdown(\"Contact me at ali.quidwai@nyu.edu 😊\")\n",
+ "\n",
+ "demo.launch(share=True, debug=True)"
+ ],
+ "metadata": {
+ "collapsed": false,
+ "pycharm": {
+ "is_executing": true
+ }
+ }
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "outputs": [],
+ "source": [
+ "import numpy as np\n",
+ "import gradio as gr\n",
+ "\n",
+ "def flip_text(x):\n",
+ "    return x[::-1]\n",
+ "\n",
+ "def flip_image(x):\n",
+ "    return np.fliplr(x)\n",
+ "\n",
+ "with gr.Blocks() as demo:\n",
+ "    gr.Markdown(\"Flip text or image files using this demo.\")\n",
+ "    with gr.Tab(\"Flip Text\"):\n",
+ "        text_input = gr.Textbox()\n",
+ "        text_output = gr.Textbox()\n",
+ "        text_button = gr.Button(\"Flip\")\n",
+ "    with gr.Tab(\"Flip Image\"):\n",
+ "        with gr.Row():\n",
+ "            image_input = gr.Image()\n",
+ "            image_output = gr.Image()\n",
+ "        image_button = gr.Button(\"Flip\")\n",
+ "\n",
+ "    with gr.Accordion(\"Open for More!\"):\n",
+ "        gr.Markdown(\"Look at me...\")\n",
+ "\n",
+ "    text_button.click(flip_text, inputs=text_input, outputs=text_output)\n",
+ "    image_button.click(flip_image, inputs=image_input, outputs=image_output)\n",
+ "\n",
+ "demo.launch()"
+ ],
+ "metadata": {
+ "collapsed": false,
+ "pycharm": {
+ "is_executing": true
+ }
+ }
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "outputs": [],
+ "source": [],
+ "metadata": {
+ "collapsed": false
+ }
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python 3",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 2
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython2",
+ "version": "2.7.6"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 0
+ }
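
For reference, a minimal standalone sketch of the Hugging Face Inference API call that the notebook wraps in its query() helper. This is an illustration, not part of the uploaded files: it assumes the token is read from an environment variable named HF_API_TOKEN rather than hardcoded, and burger.jpg is a placeholder image path.

import json
import os

import requests

# Same food-classification endpoint used in finalapp.ipynb.
API_URL = "https://api-inference.huggingface.co/models/nateraw/food"
# Token taken from the environment here (assumed variable name), not hardcoded.
headers = {"Authorization": f"Bearer {os.environ['HF_API_TOKEN']}"}

def query(filename):
    # POST the raw image bytes to the Inference API.
    with open(filename, "rb") as f:
        data = f.read()
    response = requests.post(API_URL, headers=headers, data=data)
    # The API returns a list of {"label": ..., "score": ...} dicts.
    output = response.json()
    # Same shape as the notebook's return value: a label->score dict plus the formatted JSON.
    return {d["label"]: d["score"] for d in output}, json.dumps(output, indent=2, sort_keys=True)

if __name__ == "__main__":
    scores, raw = query("burger.jpg")  # placeholder filename
    print(scores)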