ruoshiliu commited on
Commit
61cdcf9
β€’
1 Parent(s): 09edff5

App code serving precomputed demo results

Browse files
Files changed (3) hide show
  1. README.md +2 -2
  2. app.py +97 -0
  3. run.ipynb +187 -0
README.md CHANGED
@@ -1,8 +1,8 @@
1
  ---
2
  title: Zero123
3
- emoji: πŸ“ˆ
4
  colorFrom: indigo
5
- colorTo: yellow
6
  sdk: gradio
7
  sdk_version: 3.20.1
8
  app_file: app.py
 
1
  ---
2
  title: Zero123
3
+ emoji: πŸ‘€
4
  colorFrom: indigo
5
+ colorTo: purple
6
  sdk: gradio
7
  sdk_version: 3.20.1
8
  app_file: app.py
app.py ADDED
@@ -0,0 +1,97 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import numpy as np
2
+ import gradio as gr
3
+ import os
4
+ from PIL import Image
5
+ from functools import partial
6
+
7
def retrieve_input_image(dataset, inputs):
    """Load the precomputed conditioning image for an object.

    Args:
        dataset: dataset folder name under ``online_demo`` (e.g. 'nerf_wild').
        inputs: the image id selected in the dropdown.

    Returns:
        The PIL image stored as ``input.png`` for that object.
    """
    image_file = os.path.join(
        'online_demo', dataset, 'step-100_scale-6.0', inputs, 'input.png')
    return Image.open(image_file)
12
+
13
def retrieve_novel_view(dataset, img_id, polar, azimuth, zoom, seed):
    """Load the precomputed novel view matching the slider settings.

    Slider values are mapped onto the discrete grid indices baked into the
    precomputed file names: polar {-30, 0, 30} -> {0, 1, 2}, azimuth in
    30-degree steps -> {0..11}, zoom {-0.5, 0, 0.5} -> {0, 1, 2}.
    """
    polar_idx = polar // 30 + 1
    azimuth_idx = azimuth // 30
    zoom_idx = int(zoom * 2 + 1)
    view_name = 'polar-%d_azimuth-%d_distance-%d_seed-%d.png' % (
        polar_idx, azimuth_idx, zoom_idx, seed)
    view_path = os.path.join(
        'online_demo', dataset, 'step-100_scale-6.0', img_id, view_name)
    return Image.open(view_path)
21
+
22
+
23
def _build_dataset_tab(dataset, default_id):
    """Populate the current gr.Tab with the viewer UI for one dataset.

    Left column: input-image preview plus a dropdown of available object ids.
    Right column: sliders for the precomputed camera grid and the resulting
    novel view. Must be called inside an open ``gr.Tab`` context.
    """
    root = os.path.join('online_demo', dataset, 'step-100_scale-6.0')
    with gr.Row():
        with gr.Column(scale=1):
            default_input_image = Image.open(
                os.path.join(root, default_id, 'input.png'))
            input_image = gr.Image(default_input_image, shape=[256, 256])
            options = sorted(os.listdir(root))
            img_id = gr.Dropdown(options, value=default_id)
            text_button = gr.Button("Choose Input Image")
            text_button.click(partial(retrieve_input_image, dataset),
                              inputs=img_id, outputs=input_image)

        with gr.Column(scale=1):
            novel_view = gr.Image(shape=[256, 256])
            # Slider ranges mirror the precomputed grid used by
            # retrieve_novel_view (30-degree steps, half-unit zoom, 4 seeds).
            inputs = [img_id,
                      gr.Slider(-30, 30, value=0, step=30, label='Polar angle (vertical rotation in degrees)'),
                      gr.Slider(0, 330, value=0, step=30, label='Azimuth angle (horizontal rotation in degrees)'),
                      gr.Slider(-0.5, 0.5, value=0, step=0.5, label='Zoom'),
                      gr.Slider(1, 4, value=1, step=1, label='Random seed')]

            submit_button = gr.Button("Get Novel View")
            submit_button.click(partial(retrieve_novel_view, dataset),
                                inputs=inputs, outputs=novel_view)


with gr.Blocks() as demo:
    # NOTE: the previous banner text ("Flip text or image files ...") was
    # leftover from gradio's blocks_flipper template; describe this app.
    gr.Markdown("Zero123: view precomputed novel views of an object "
                "synthesized from a single input image.")
    # One tab per evaluation dataset: (tab title, dataset folder, default id).
    for tab_name, dataset, default_id in [
            ("In-the-wild Images", 'nerf_wild', 'car1'),
            ("Google Scanned Objects", 'GSO', 'SAMBA_HEMP'),
            ("RTMV", 'RTMV', '00000')]:
        with gr.Tab(tab_name):
            _build_dataset_tab(dataset, default_id)


if __name__ == "__main__":
    demo.launch()
run.ipynb ADDED
@@ -0,0 +1,187 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "cells": [
3
+ {
4
+ "cell_type": "markdown",
5
+ "id": 3.0293430767166755e+38,
6
+ "metadata": {
7
+ "id": 3.0293430767166755e+38
8
+ },
9
+ "source": [
10
+ "# Gradio Demo: blocks_flipper"
11
+ ]
12
+ },
13
+ {
14
+ "cell_type": "code",
15
+ "execution_count": 57,
16
+ "id": 2.8891853944186117e+38,
17
+ "metadata": {
18
+ "colab": {
19
+ "base_uri": "https://localhost:8080/",
20
+ "height": 616
21
+ },
22
+ "id": 2.8891853944186117e+38,
23
+ "outputId": "b60a6d5e-045d-4b40-bfd8-6caa407a34df",
24
+ "scrolled": false
25
+ },
26
+ "outputs": [
27
+ {
28
+ "name": "stdout",
29
+ "output_type": "stream",
30
+ "text": [
31
+ "\n",
32
+ "Thanks for being a Gradio user! If you have questions or feedback, please join our Discord server and chat with us: https://discord.gg/feTf9x3ZSB\n",
33
+ "Running on local URL: http://127.0.0.1:7908\n",
34
+ "\n",
35
+ "To create a public link, set `share=True` in `launch()`.\n"
36
+ ]
37
+ },
38
+ {
39
+ "data": {
40
+ "text/html": [
41
+ "<div><iframe src=\"http://127.0.0.1:7908/\" width=\"100%\" height=\"500\" allow=\"autoplay; camera; microphone; clipboard-read; clipboard-write;\" frameborder=\"0\" allowfullscreen></iframe></div>"
42
+ ],
43
+ "text/plain": [
44
+ "<IPython.core.display.HTML object>"
45
+ ]
46
+ },
47
+ "metadata": {},
48
+ "output_type": "display_data"
49
+ }
50
+ ],
51
+ "source": [
52
+ "import numpy as np\n",
53
+ "import gradio as gr\n",
54
+ "import os\n",
55
+ "from PIL import Image\n",
56
+ "from functools import partial\n",
57
+ "\n",
58
+ "def retrieve_input_image(dataset, inputs):\n",
59
+ " img_id = inputs\n",
60
+ " img_path = os.path.join('online_demo', dataset, 'step-100_scale-6.0', img_id, 'input.png')\n",
61
+ " image = Image.open(img_path)\n",
62
+ " return image\n",
63
+ "\n",
64
+ "def retrieve_novel_view(dataset, img_id, polar, azimuth, zoom, seed):\n",
65
+ " polar = polar // 30 + 1\n",
66
+ " azimuth = azimuth // 30\n",
67
+ " zoom = int(zoom * 2 + 1)\n",
68
+ " img_path = os.path.join('online_demo', dataset, 'step-100_scale-6.0', img_id,\\\n",
69
+ " 'polar-%d_azimuth-%d_distance-%d_seed-%d.png' % (polar, azimuth, zoom, seed))\n",
70
+ " image = Image.open(img_path)\n",
71
+ " return image\n",
72
+ " \n",
73
+ "\n",
74
+ "with gr.Blocks() as demo:\n",
75
+ " gr.Markdown(\"Flip text or image files using this demo.\")\n",
76
+ " with gr.Tab(\"In-the-wild Images\"):\n",
77
+ " with gr.Row():\n",
78
+ " with gr.Column(scale=1):\n",
79
+ " default_input_image = Image.open( os.path.join('online_demo', 'nerf_wild', 'step-100_scale-6.0', 'car1', 'input.png'))\n",
80
+ " input_image = gr.Image(default_input_image, shape=[256, 256])\n",
81
+ " options = sorted(os.listdir('online_demo/nerf_wild/step-100_scale-6.0'))\n",
82
+ " img_id = gr.Dropdown(options, value='car1')\n",
83
+ " text_button = gr.Button(\"Choose Input Image\")\n",
84
+ " retrieve_input_image_dataset = partial(retrieve_input_image, 'nerf_wild')\n",
85
+ " text_button.click(retrieve_input_image_dataset, inputs=img_id, outputs=input_image)\n",
86
+ "\n",
87
+ " with gr.Column(scale=1):\n",
88
+ " novel_view = gr.Image(shape=[256, 256])\n",
89
+ " inputs = [img_id,\n",
90
+ " gr.Slider(-30, 30, value=0, step=30, label='Polar angle (vertical rotation in degrees)'),\n",
91
+ " gr.Slider(0, 330, value=0, step=30, label='Azimuth angle (horizontal rotation in degrees)'),\n",
92
+ " gr.Slider(-0.5, 0.5, value=0, step=0.5, label='Zoom'),\n",
93
+ " gr.Slider(1, 4, value=1, step=1, label='Random seed')]\n",
94
+ " \n",
95
+ " submit_button = gr.Button(\"Get Novel View\")\n",
96
+ " retrieve_novel_view_dataset = partial(retrieve_novel_view, 'nerf_wild')\n",
97
+ " submit_button.click(retrieve_novel_view_dataset, inputs=inputs, outputs=novel_view)\n",
98
+ " \n",
99
+ " with gr.Tab(\"Google Scanned Objects\"):\n",
100
+ " with gr.Row():\n",
101
+ " with gr.Column(scale=1):\n",
102
+ " default_input_image = Image.open( os.path.join('online_demo', 'GSO', 'step-100_scale-6.0', 'SAMBA_HEMP', 'input.png'))\n",
103
+ " input_image = gr.Image(default_input_image, shape=[256, 256])\n",
104
+ " options = sorted(os.listdir('online_demo/GSO/step-100_scale-6.0'))\n",
105
+ " img_id = gr.Dropdown(options, value='SAMBA_HEMP')\n",
106
+ " text_button = gr.Button(\"Choose Input Image\")\n",
107
+ " retrieve_input_image_dataset = partial(retrieve_input_image, 'GSO')\n",
108
+ " text_button.click(retrieve_input_image_dataset, inputs=img_id, outputs=input_image)\n",
109
+ "\n",
110
+ " with gr.Column(scale=1):\n",
111
+ " novel_view = gr.Image(shape=[256, 256])\n",
112
+ " inputs = [img_id,\n",
113
+ " gr.Slider(-30, 30, value=0, step=30, label='Polar angle (vertical rotation in degrees)'),\n",
114
+ " gr.Slider(0, 330, value=0, step=30, label='Azimuth angle (horizontal rotation in degrees)'),\n",
115
+ " gr.Slider(-0.5, 0.5, value=0, step=0.5, label='Zoom'),\n",
116
+ " gr.Slider(1, 4, value=1, step=1, label='Random seed')]\n",
117
+ " \n",
118
+ " submit_button = gr.Button(\"Get Novel View\")\n",
119
+ " retrieve_novel_view_dataset = partial(retrieve_novel_view, 'GSO')\n",
120
+ " submit_button.click(retrieve_novel_view_dataset, inputs=inputs, outputs=novel_view)\n",
121
+ " \n",
122
+ " with gr.Tab(\"RTMV\"):\n",
123
+ " with gr.Row():\n",
124
+ " with gr.Column(scale=1):\n",
125
+ " default_input_image = Image.open( os.path.join('online_demo', 'RTMV', 'step-100_scale-6.0', '00000', 'input.png'))\n",
126
+ " input_image = gr.Image(default_input_image, shape=[256, 256])\n",
127
+ " options = sorted(os.listdir('online_demo/RTMV/step-100_scale-6.0'))\n",
128
+ " img_id = gr.Dropdown(options, value='00000')\n",
129
+ " text_button = gr.Button(\"Choose Input Image\")\n",
130
+ " retrieve_input_image_dataset = partial(retrieve_input_image, 'RTMV')\n",
131
+ " text_button.click(retrieve_input_image_dataset, inputs=img_id, outputs=input_image)\n",
132
+ "\n",
133
+ " with gr.Column(scale=1):\n",
134
+ " novel_view = gr.Image(shape=[256, 256])\n",
135
+ " inputs = [img_id,\n",
136
+ " gr.Slider(-30, 30, value=0, step=30, label='Polar angle (vertical rotation in degrees)'),\n",
137
+ " gr.Slider(0, 330, value=0, step=30, label='Azimuth angle (horizontal rotation in degrees)'),\n",
138
+ " gr.Slider(-0.5, 0.5, value=0, step=0.5, label='Zoom'),\n",
139
+ " gr.Slider(1, 4, value=1, step=1, label='Random seed')]\n",
140
+ " \n",
141
+ " submit_button = gr.Button(\"Get Novel View\")\n",
142
+ " retrieve_novel_view_dataset = partial(retrieve_novel_view, 'RTMV')\n",
143
+ " submit_button.click(retrieve_novel_view_dataset, inputs=inputs, outputs=novel_view)\n",
144
+ " \n",
145
+ " \n",
146
+ "\n",
147
+ "if __name__ == \"__main__\":\n",
148
+ " demo.launch()\n"
149
+ ]
150
+ },
151
+ {
152
+ "cell_type": "code",
153
+ "execution_count": null,
154
+ "id": "bk8_q39r_iGt",
155
+ "metadata": {
156
+ "id": "bk8_q39r_iGt"
157
+ },
158
+ "outputs": [],
159
+ "source": []
160
+ }
161
+ ],
162
+ "metadata": {
163
+ "colab": {
164
+ "provenance": []
165
+ },
166
+ "gpuClass": "standard",
167
+ "kernelspec": {
168
+ "display_name": "Python 3 (ipykernel)",
169
+ "language": "python",
170
+ "name": "python3"
171
+ },
172
+ "language_info": {
173
+ "codemirror_mode": {
174
+ "name": "ipython",
175
+ "version": 3
176
+ },
177
+ "file_extension": ".py",
178
+ "mimetype": "text/x-python",
179
+ "name": "python",
180
+ "nbconvert_exporter": "python",
181
+ "pygments_lexer": "ipython3",
182
+ "version": "3.9.12"
183
+ }
184
+ },
185
+ "nbformat": 4,
186
+ "nbformat_minor": 5
187
+ }