fix: pixel slider + remove reference
- app.py +13 -14
- pixelization.py +73 -11
- reference.png +0 -0
app.py
CHANGED
@@ -6,7 +6,7 @@ import argparse
 import huggingface_hub
 import os
 
-TOKEN = os.environ['TOKEN']
+# TOKEN = os.environ['TOKEN']
 
 def parse_args() -> argparse.Namespace:
     parser = argparse.ArgumentParser()
@@ -26,19 +26,19 @@ def main():
 
     # DL MODEL
    # PIX_MODEL
-    os.environ['PIX_MODEL'] = huggingface_hub.hf_hub_download("NoCrypt/pixelization_models", "pixelart_vgg19.pth", token=TOKEN);
-    # NET_MODEL
-    os.environ['NET_MODEL'] = huggingface_hub.hf_hub_download("NoCrypt/pixelization_models", "160_net_G_A.pth", token=TOKEN);
-    # ALIAS_MODEL
-    os.environ['ALIAS_MODEL'] = huggingface_hub.hf_hub_download("NoCrypt/pixelization_models", "alias_net.pth", token=TOKEN);
-
-    # # For local testing
-    # # PIX_MODEL
-    # os.environ['PIX_MODEL'] = "pixelart_vgg19.pth"
-    # # NET_MODEL
-    # os.environ['NET_MODEL'] = "160_net_G_A.pth"
-    # # ALIAS_MODEL
-    # os.environ['ALIAS_MODEL'] = "alias_net.pth"
+    # os.environ['PIX_MODEL'] = huggingface_hub.hf_hub_download("NoCrypt/pixelization_models", "pixelart_vgg19.pth", token=TOKEN);
+    # # NET_MODEL
+    # os.environ['NET_MODEL'] = huggingface_hub.hf_hub_download("NoCrypt/pixelization_models", "160_net_G_A.pth", token=TOKEN);
+    # # ALIAS_MODEL
+    # os.environ['ALIAS_MODEL'] = huggingface_hub.hf_hub_download("NoCrypt/pixelization_models", "alias_net.pth", token=TOKEN);
+
+    # For local testing
+    # PIX_MODEL
+    os.environ['PIX_MODEL'] = "pixelart_vgg19.pth"
+    # NET_MODEL
+    os.environ['NET_MODEL'] = "160_net_G_A.pth"
+    # ALIAS_MODEL
+    os.environ['ALIAS_MODEL'] = "alias_net.pth"
 
 
    use_cpu = True
@@ -51,8 +51,7 @@ def main():
     gr.Interface(m.pixelize_modified,
                  [
                      gr.components.Image(type='pil', label='Input'),
-                     gr.components.Slider(minimum=
-                     gr.components.Slider(minimum=1, maximum=16, value=4, step=1, label='Pixel Size'),
+                     gr.components.Slider(minimum=4, maximum=32, value=4, step=1, label='Pixel Size'),
                      gr.components.Checkbox(True, label="Upscale after")
                  ],
                  gr.components.Image(type='pil', label='Output'),
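
Note on the slider fix: a Gradio Slider's minimum/maximum only constrain the widget the browser renders, so a request forged through the browser's inspect element can still submit an out-of-range pixel_size. That is why the commit pairs the new minimum=4 with a server-side clamp in pixelization.py. A minimal sketch of the same guard, assuming nothing beyond this diff (clamp_pixel_size is a hypothetical helper, not part of the commit):

    def clamp_pixel_size(pixel_size: int, minimum: int = 4) -> int:
        # Mirror of the guard added in pixelize_modified: never trust the
        # client UI; clamp forged values up to the slider's real minimum.
        return pixel_size if pixel_size >= minimum else minimum

    assert clamp_pixel_size(1) == 4   # value forged below the slider minimum
    assert clamp_pixel_size(8) == 8   # legitimate slider values pass through
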
pixelization.py
CHANGED
@@ -7,12 +7,68 @@ from models.networks import define_G
 import glob
 
 
+pixelize_code = [
+    233356.8125, -27387.5918, -32866.8008, 126575.0312, -181590.0156,
+    -31543.1289, 50374.1289, 99631.4062, -188897.3750, 138322.7031,
+    -107266.2266, 125778.5781, 42416.1836, 139710.8594, -39614.6250,
+    -69972.6875, -21886.4141, 86938.4766, 31457.6270, -98892.2344,
+    -1191.5887, -61662.1719, -180121.9062, -32931.0859, 43109.0391,
+    21490.1328, -153485.3281, 94259.1797, 43103.1992, -231953.8125,
+    52496.7422, 142697.4062, -34882.7852, -98740.0625, 34458.5078,
+    -135436.3438, 11420.5488, -18895.8984, -71195.4141, 176947.2344,
+    -52747.5742, 109054.6562, -28124.9473, -17736.6152, -41327.1562,
+    69853.3906, 79046.2656, -3923.7344, -5644.5229, 96586.7578,
+    -89315.2656, -146578.0156, -61862.1484, -83956.4375, 87574.5703,
+    -75055.0469, 19571.8203, 79358.7891, -16501.5000, -147169.2188,
+    -97861.6797, 60442.1797, 40156.9023, 223136.3906, -81118.0547,
+    -221443.6406, 54911.6914, 54735.9258, -58805.7305, -168884.4844,
+    40865.9609, -28627.9043, -18604.7227, 120274.6172, 49712.2383,
+    164402.7031, -53165.0820, -60664.0469, -97956.1484, -121468.4062,
+    -69926.1484, -4889.0151, 127367.7344, 200241.0781, -85817.7578,
+    -143190.0625, -74049.5312, 137980.5781, -150788.7656, -115719.6719,
+    -189250.1250, -153069.7344, -127429.7891, -187588.2500, 125264.7422,
+    -79082.3438, -114144.5781, 36033.5039, -57502.2188, 80488.1562,
+    36501.4570, -138817.5938, -22189.6523, -222146.9688, -73292.3984,
+    127717.2422, -183836.3750, -105907.0859, 145422.8750, 66981.2031,
+    -9596.6699, 78099.4922, 70226.3359, 35841.8789, -116117.6016,
+    -150986.0156, 81622.4922, 113575.0625, 154419.4844, 53586.4141,
+    118494.8750, 131625.4375, -19763.1094, 75581.1172, -42750.5039,
+    97934.8281, 6706.7949, -101179.0078, 83519.6172, -83054.8359,
+    -56749.2578, -30683.6992, 54615.9492, 84061.1406, -229136.7188,
+    -60554.0000, 8120.2622, -106468.7891, -28316.3418, -166351.3125,
+    47797.3984, 96013.4141, 71482.9453, -101429.9297, 209063.3594,
+    -3033.6882, -38952.5352, -84920.6719, -5895.1543, -18641.8105,
+    47884.3633, -14620.0273, -132898.6719, -40903.5859, 197217.3750,
+    -128599.1328, -115397.8906, -22670.7676, -78569.9688, -54559.7070,
+    -106855.2031, 40703.1484, 55568.3164, 60202.9844, -64757.9375,
+    -32068.8652, 160663.3438, 72187.0703, -148519.5469, 162952.8906,
+    -128048.2031, -136153.8906, -15270.3730, -52766.3281, -52517.4531,
+    18652.1992, 195354.2188, -136657.3750, -8034.2622, -92699.6016,
+    -129169.1406, 188479.9844, 46003.7500, -93383.0781, -67831.6484,
+    -66710.5469, 104338.5234, 85878.8438, -73165.2031, 95857.3203,
+    71213.1250, 94603.1094, -30359.8125, -107989.2578, 99822.1719,
+    184626.3594, 79238.4531, -272978.9375, -137948.5781, -145245.8125,
+    75359.2031, 26652.7930, 50421.4141, 60784.4102, -18286.3398,
+    -182851.9531, -87178.7969, -13131.7539, 195674.8906, 59951.7852,
+    124353.7422, -36709.1758, -54575.4766, 77822.6953, 43697.4102,
+    -64394.3438, 113281.1797, -93987.0703, 221989.7188, 132902.5000,
+    -9538.8574, -14594.1338, 65084.9453, -12501.7227, 130330.6875,
+    -115123.4766, 20823.0898, 75512.4922, -75255.7422, -41936.7656,
+    -186678.8281, -166799.9375, 138770.6250, -78969.9531, 124516.8047,
+    -85558.5781, -69272.4375, -115539.1094, 228774.4844, -76529.3281,
+    -107735.8906, -76798.8906, -194335.2812, 56530.5742, -9397.7529,
+    132985.8281, 163929.8438, -188517.7969, -141155.6406, 45071.0391,
+    207788.3125, -125826.1172, 8965.3320, -159584.8438, 95842.4609,
+    -76929.4688
+]
+
+
+
 class Model():
     def __init__(self, device="cpu"):
         self.device = torch.device(device)
         self.G_A_net = None
         self.alias_net = None
-        self.ref_t = None
 
     def load(self):
         with torch.no_grad():
@@ -29,9 +85,6 @@ class Model():
             alias_state["module."+str(p)] = alias_state.pop(p)
         self.alias_net.load_state_dict(alias_state)
 
-        ref_img = Image.open("reference.png").convert('L')
-        self.ref_t = process(greyscale(ref_img)).to(self.device)
-
     def pixelize(self, in_img, out_img):
         with torch.no_grad():
             in_img = Image.open(in_img).convert('RGB')
@@ -41,20 +94,29 @@ class Model():
 
             save(out_t, out_img)
 
-    def pixelize_modified(self, in_img,
+    def pixelize_modified(self, in_img, pixel_size, upscale_after) -> Image.Image:
         with torch.no_grad():
             in_img = in_img.convert('RGB')
 
-            # limit in_img size to 1024x1024
+            # limit in_img size to 1024x1024 to maintain performance
             if in_img.size[0] > 1024 or in_img.size[1] > 1024:
-
-
-
-
+                in_img.thumbnail((1024, 1024), Image.NEAREST)
+
+            # Killing inspect element users, I know what you're doing lol.
+            pixel_size = pixel_size if pixel_size >= 4 else 4
 
+            in_img = in_img.resize((in_img.size[0] * 4 // pixel_size, in_img.size[1] * 4 // pixel_size))
             in_t = process(in_img).to(self.device)
 
-            out_t = self.alias_net(self.G_A_net(in_t, self.ref_t))
+
+            # out_t = self.alias_net(self.G_A_net(in_t, self.ref_t))
+            feature = self.G_A_net.module.RGBEnc(in_t)
+            code = torch.asarray(pixelize_code, device=self.device).reshape((1, 256, 1, 1))
+            adain_params = self.G_A_net.module.MLP(code)
+            images = self.G_A_net.module.RGBDec(feature, adain_params)
+            out_t = self.alias_net(images)
+
+
             img = to_image(out_t, pixel_size, upscale_after)
             return img
 
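
What the pixelization.py change does: the old pipeline loaded reference.png at startup, encoded it into a style tensor (self.ref_t), and ran self.G_A_net(in_t, self.ref_t); the commit instead hard-codes what appears to be the corresponding 256-value style latent as pixelize_code, pushes it through the generator's MLP to obtain the AdaIN parameters, and decodes the encoded RGB features directly, which is what lets the binary reference image be deleted. The input is also pre-scaled by 4 / pixel_size before inference, presumably because the generator emits pixel cells at a fixed size of 4, with to_image rescaling on the way out.

A minimal usage sketch of the new entry point, assuming Model.load() succeeds with the model paths app.py sets (input.png and output.png are placeholder names, not part of the commit):

    from PIL import Image

    m = Model(device="cpu")
    m.load()  # populates G_A_net and alias_net
    img = Image.open("input.png")
    out = m.pixelize_modified(img, pixel_size=8, upscale_after=True)  # PIL in, PIL out
    out.save("output.png")
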
reference.png
DELETED
Binary file (3.41 kB)