chansung committed on
Commit
872f59a
1 Parent(s): c4eaf24

Upload app.py

Browse files
Files changed (1) hide show
  1. app.py +201 -0
app.py ADDED
@@ -0,0 +1,201 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import gradio as gr
2
+
3
+ import numpy as np
4
+ from PIL import Image
5
+ import tensorflow as tf
6
+ from transformers import SegformerFeatureExtractor, TFSegformerForSemanticSegmentation
7
+
8
# Load the SegFormer preprocessor and model once at module import time so
# every Gradio request reuses the same (expensive-to-load) objects.
feature_extractor = SegformerFeatureExtractor.from_pretrained(
    "nvidia/segformer-b5-finetuned-ade-640-640"
)
# from_pt=True converts the published PyTorch checkpoint weights to
# TensorFlow on load (presumably no native TF weights exist for this
# checkpoint — TODO confirm on the model hub page).
model = TFSegformerForSemanticSegmentation.from_pretrained(
    "nvidia/segformer-b5-finetuned-ade-640-640", from_pt=True
)
14
+
15
def ade_palette():
    """Return the ADE20K palette as a list of 150 [R, G, B] lists.

    Index i in the returned list is the display color for class id i.
    """
    # Stored as an immutable tuple of tuples; converted to the list-of-lists
    # shape that callers (np.array + per-label indexing) expect.
    colors = (
        (120, 120, 120), (180, 120, 120), (6, 230, 230), (80, 50, 50), (4, 200, 3),
        (120, 120, 80), (140, 140, 140), (204, 5, 255), (230, 230, 230), (4, 250, 7),
        (224, 5, 255), (235, 255, 7), (150, 5, 61), (120, 120, 70), (8, 255, 51),
        (255, 6, 82), (143, 255, 140), (204, 255, 4), (255, 51, 7), (204, 70, 3),
        (0, 102, 200), (61, 230, 250), (255, 6, 51), (11, 102, 255), (255, 7, 71),
        (255, 9, 224), (9, 7, 230), (220, 220, 220), (255, 9, 92), (112, 9, 255),
        (8, 255, 214), (7, 255, 224), (255, 184, 6), (10, 255, 71), (255, 41, 10),
        (7, 255, 255), (224, 255, 8), (102, 8, 255), (255, 61, 6), (255, 194, 7),
        (255, 122, 8), (0, 255, 20), (255, 8, 41), (255, 5, 153), (6, 51, 255),
        (235, 12, 255), (160, 150, 20), (0, 163, 255), (140, 140, 140), (250, 10, 15),
        (20, 255, 0), (31, 255, 0), (255, 31, 0), (255, 224, 0), (153, 255, 0),
        (0, 0, 255), (255, 71, 0), (0, 235, 255), (0, 173, 255), (31, 0, 255),
        (11, 200, 200), (255, 82, 0), (0, 255, 245), (0, 61, 255), (0, 255, 112),
        (0, 255, 133), (255, 0, 0), (255, 163, 0), (255, 102, 0), (194, 255, 0),
        (0, 143, 255), (51, 255, 0), (0, 82, 255), (0, 255, 41), (0, 255, 173),
        (10, 0, 255), (173, 255, 0), (0, 255, 153), (255, 92, 0), (255, 0, 255),
        (255, 0, 245), (255, 0, 102), (255, 173, 0), (255, 0, 20), (255, 184, 184),
        (0, 31, 255), (0, 255, 61), (0, 71, 255), (255, 0, 204), (0, 255, 194),
        (0, 255, 82), (0, 10, 255), (0, 112, 255), (51, 0, 255), (0, 194, 255),
        (0, 122, 255), (0, 255, 163), (255, 153, 0), (0, 255, 10), (255, 112, 0),
        (143, 255, 0), (82, 0, 255), (163, 255, 0), (255, 235, 0), (8, 184, 170),
        (133, 0, 255), (0, 255, 92), (184, 0, 255), (255, 0, 31), (0, 184, 255),
        (0, 214, 255), (255, 0, 112), (92, 255, 0), (0, 224, 255), (112, 224, 255),
        (70, 184, 160), (163, 0, 255), (153, 0, 255), (71, 255, 0), (255, 0, 163),
        (255, 204, 0), (255, 0, 143), (0, 255, 235), (133, 255, 0), (255, 0, 235),
        (245, 0, 255), (255, 0, 122), (255, 245, 0), (10, 190, 212), (214, 255, 0),
        (0, 204, 255), (20, 0, 255), (255, 255, 0), (0, 153, 255), (0, 41, 255),
        (0, 255, 204), (41, 0, 255), (41, 255, 0), (173, 0, 255), (0, 245, 255),
        (71, 0, 255), (122, 0, 255), (0, 255, 184), (0, 92, 255), (184, 255, 0),
        (0, 133, 255), (255, 214, 0), (25, 194, 194), (102, 255, 0), (92, 0, 255),
    )
    return [list(rgb) for rgb in colors]
169
+
170
def sepia(input_img):
    """Segment an image with SegFormer and overlay the colorized class mask.

    NOTE(review): the name `sepia` is a leftover from the Gradio template —
    this performs ADE20K semantic segmentation, not a sepia filter. The name
    is kept because `gr.Interface` below references it.

    Args:
        input_img: RGB image as a numpy array (H, W, 3), as supplied by the
            Gradio Image component.

    Returns:
        uint8 numpy array of the same spatial size: the input blended 50/50
        with the palette-colorized segmentation mask.
    """
    input_img = Image.fromarray(input_img)

    inputs = feature_extractor(images=input_img, return_tensors="tf")
    outputs = model(**inputs)
    logits = outputs.logits  # (batch, num_labels, h/4, w/4), channels-first

    # Move channels last so tf.image.resize can upsample the class scores
    # back to the original resolution.
    logits = tf.transpose(logits, [0, 2, 3, 1])
    # PIL's `Image.size` is (width, height); resize expects (height, width),
    # hence the reversal.
    logits = tf.image.resize(logits, input_img.size[::-1])
    seg = tf.math.argmax(logits, axis=-1)[0]  # (height, width) class ids

    # Paint each predicted class with its palette color.
    color_seg = np.zeros((seg.shape[0], seg.shape[1], 3), dtype=np.uint8)
    palette = np.array(ade_palette())
    for label, color in enumerate(palette):
        color_seg[seg == label, :] = color

    # Bug fix: the original flipped the mask to BGR (`color_seg[..., ::-1]`),
    # an OpenCV display convention. Gradio renders RGB, so the palette colors
    # were channel-swapped on screen. The mask is kept in RGB here.

    # Blend image and mask 50/50 for display.
    pred_img = np.array(input_img) * 0.5 + color_seg * 0.5
    return pred_img.astype(np.uint8)
198
+
199
# Wire the segmentation function into a minimal Gradio UI: one image input,
# one image output. `shape=(200, 200)` presumably asks the (Gradio 3.x)
# Image component to resize uploads before they reach `sepia` — NOTE(review):
# this keyword was removed in Gradio 4.x; confirm the pinned Gradio version.
demo = gr.Interface(sepia, gr.Image(shape=(200, 200)), "image")

# Start the web server (blocks until the app is stopped).
demo.launch()