Commit 523fb10 (parent: 2845f22), committed by gavinyuan
delete: GPEN example images

This view is limited to 50 files because it contains too many changes; see the raw diff for the complete change set.
- app.py +1 -1
- inference/tricks.py +157 -0
- third_party/GPEN/.gitignore +137 -0
- third_party/GPEN/.idea/.gitignore +8 -0
- third_party/GPEN/.idea/deployment.xml +15 -0
- third_party/GPEN/.idea/inspectionProfiles/profiles_settings.xml +6 -0
- third_party/GPEN/.idea/modules.xml +8 -0
- third_party/GPEN/GPEN_inference.ipynb +0 -0
- third_party/GPEN/README.md +122 -0
- third_party/GPEN/__init_paths.py +33 -0
- third_party/GPEN/align_faces.py +266 -0
- third_party/GPEN/distributed.py +126 -0
- third_party/GPEN/face_colorization.py +48 -0
- third_party/GPEN/face_detect/data/FDDB/img_list.txt +2845 -0
- third_party/GPEN/face_detect/data/__init__.py +3 -0
- third_party/GPEN/face_detect/data/config.py +42 -0
- third_party/GPEN/face_detect/data/data_augment.py +237 -0
- third_party/GPEN/face_detect/data/wider_face.py +101 -0
- third_party/GPEN/face_detect/facemodels/__init__.py +0 -0
- third_party/GPEN/face_detect/facemodels/net.py +137 -0
- third_party/GPEN/face_detect/facemodels/retinaface.py +127 -0
- third_party/GPEN/face_detect/layers/__init__.py +2 -0
- third_party/GPEN/face_detect/layers/functions/prior_box.py +34 -0
- third_party/GPEN/face_detect/layers/modules/__init__.py +3 -0
- third_party/GPEN/face_detect/layers/modules/multibox_loss.py +125 -0
- third_party/GPEN/face_detect/retinaface_detection.py +193 -0
- third_party/GPEN/face_detect/utils/__init__.py +0 -0
- third_party/GPEN/face_detect/utils/box_utils.py +330 -0
- third_party/GPEN/face_detect/utils/nms/__init__.py +0 -0
- third_party/GPEN/face_detect/utils/nms/py_cpu_nms.py +38 -0
- third_party/GPEN/face_detect/utils/timer.py +40 -0
- third_party/GPEN/face_enhancement.py +161 -0
- third_party/GPEN/face_inpainting.py +101 -0
- third_party/GPEN/face_model/face_gan.py +89 -0
- third_party/GPEN/face_model/gpen_model.py +941 -0
- third_party/GPEN/face_model/op/__init__.py +2 -0
- third_party/GPEN/face_model/op/fused_act.py +96 -0
- third_party/GPEN/face_model/op/fused_bias_act.cpp +21 -0
- third_party/GPEN/face_model/op/fused_bias_act_kernel.cu +99 -0
- third_party/GPEN/face_model/op/upfirdn2d.cpp +23 -0
- third_party/GPEN/face_model/op/upfirdn2d.py +194 -0
- third_party/GPEN/face_model/op/upfirdn2d_kernel.cu +272 -0
- third_party/GPEN/face_parse/blocks.py +127 -0
- third_party/GPEN/face_parse/face_parsing.py +78 -0
- third_party/GPEN/face_parse/mask.png +0 -0
- third_party/GPEN/face_parse/parse_model.py +77 -0
- third_party/GPEN/face_parse/test.png +0 -0
- third_party/GPEN/infer_image.py +116 -0
- third_party/GPEN/infer_video.py +94 -0
- third_party/GPEN/misc/cog.yaml +17 -0
app.py
CHANGED
@@ -19,7 +19,7 @@ import tqdm
 # from inference.utils import save, get_5_from_98, get_detector, get_lmk
 # from inference.PIPNet.lib.tools import get_lmk_model, demo_image
 # from inference.landmark_smooth import kalman_filter_landmark, savgol_filter_landmark
-
+from inference.tricks import Trick
 # make_abs_path = lambda fn: os.path.abspath(os.path.join(os.path.dirname(os.path.realpath(__file__)), fn))
 #
inference/tricks.py
ADDED
@@ -0,0 +1,157 @@
import os
import torch
import torch.nn as nn
import torch.nn.functional as F
import cv2
import numpy as np

from third_party.bisenet.bisenet import BiSeNet
from third_party.GPEN.infer_image import GPENImageInfer


make_abs_path = lambda fn: os.path.abspath(os.path.join(os.path.dirname(os.path.realpath(__file__)), fn))


class Trick(object):
    def __init__(self):
        self.gpen_model = None
        self.mouth_helper = None

    @staticmethod
    def get_any_mask(img, par=None, normalized=False):
        # [0, 'background', 1 'skin', 2 'l_brow', 3 'r_brow', 4 'l_eye', 5 'r_eye',
        # 6 'eye_g', 7 'l_ear', 8 'r_ear', 9 'ear_r', 10 'nose', 11 'mouth', 12 'u_lip',
        # 13 'l_lip', 14 'neck', 15 'neck_l', 16 'cloth', 17 'hair', 18 'hat']
        ori_h, ori_w = img.shape[2], img.shape[3]
        with torch.no_grad():
            img = F.interpolate(img, size=512, mode="nearest", )
            if not normalized:
                img = img * 0.5 + 0.5
                img = img.sub(vgg_mean.detach()).div(vgg_std.detach())
            out = global_bisenet(img)[0]
            parsing = out.softmax(1).argmax(1)
        mask = torch.zeros_like(parsing)
        for p in par:
            mask = mask + ((parsing == p).float())
        mask = mask.unsqueeze(1)
        mask = F.interpolate(mask, size=(ori_h, ori_w), mode="bilinear", align_corners=True)
        return mask

    @staticmethod
    def finetune_mask(facial_mask: np.ndarray, lmk_98: np.ndarray = None):
        assert facial_mask.shape[1] == 256
        facial_mask = (facial_mask * 255).astype(np.uint8)
        # h_min = lmk_98[33:41, 0].min() + 20
        h_min = 80

        facial_mask = cv2.dilate(facial_mask, (40, 40), iterations=1)
        facial_mask[:h_min] = 0  # black
        facial_mask[255 - 20:] = 0

        kernel_size = (20, 20)
        blur_size = tuple(2 * j + 1 for j in kernel_size)
        facial_mask = cv2.GaussianBlur(facial_mask, blur_size, 0)

        return facial_mask.astype(np.float) / 255

    @staticmethod
    def smooth_mask(mask_tensor: torch.Tensor):
        mask_tensor, _ = global_smooth_mask(mask_tensor)
        return mask_tensor

    @staticmethod
    def tensor_to_arr(tensor):
        return ((tensor + 1.) * 127.5).permute(0, 2, 3, 1).cpu().numpy().astype(np.uint8)

    @staticmethod
    def arr_to_tensor(arr, norm: bool = True):
        tensor = torch.tensor(arr, dtype=torch.float).cuda() / 255  # in [0,1]
        tensor = (tensor - 0.5) / 0.5 if norm else tensor  # in [-1,1]
        tensor = tensor.permute(0, 3, 1, 2)
        return tensor

    def gpen(self, img_np: np.ndarray, use_gpen=True):
        if not use_gpen:
            return img_np
        if self.gpen_model is None:
            self.gpen_model = GPENImageInfer()
        img_np = self.gpen_model.image_infer(img_np)
        return img_np

    def finetune_mouth(self, i_s, i_t, i_r):
        if self.mouth_helper is None:
            self.load_mouth_helper()
        helper_face = self.mouth_helper(i_s, i_t)[0]
        i_r_mouth_mask = self.get_any_mask(i_r, par=[11, 12, 13])  # (B,1,H,W)

        ''' dilate and blur by cv2 '''
        i_r_mouth_mask = self.tensor_to_arr(i_r_mouth_mask)[0]  # (H,W,C)
        i_r_mouth_mask = cv2.dilate(i_r_mouth_mask, (20, 20), iterations=1)

        kernel_size = (5, 5)
        blur_size = tuple(2 * j + 1 for j in kernel_size)
        i_r_mouth_mask = cv2.GaussianBlur(i_r_mouth_mask, blur_size, 0)  # (H,W,C)
        i_r_mouth_mask = i_r_mouth_mask.squeeze()[None, :, :, None]  # (1,H,W,1)
        i_r_mouth_mask = self.arr_to_tensor(i_r_mouth_mask, norm=False)  # in [0,1]

        return helper_face * i_r_mouth_mask + i_r * (1 - i_r_mouth_mask)

    def load_mouth_helper(self):
        from inference.ffplus.eval import EvaluatorFaceShifter
        mouth_helper_pl = EvaluatorFaceShifter(
            load_path="/apdcephfs/share_1290939/gavinyuan/out/triplet10w_34/epoch=13-step=737999.ckpt",
            pt_path=make_abs_path("../ffplus/extracted_ckpt/G_t34_helper_post.pth"),
            benchmark=None,
            demo_folder=None,
        )
        self.mouth_helper = mouth_helper_pl.faceswap_model
        print("[Mouth helper] loaded.")


""" From MegaFS: https://github.com/zyainfal/One-Shot-Face-Swapping-on-Megapixels/tree/main/inference """
class SoftErosion(nn.Module):
    def __init__(self, kernel_size=15, threshold=0.6, iterations=1):
        super(SoftErosion, self).__init__()
        r = kernel_size // 2
        self.padding = r
        self.iterations = iterations
        self.threshold = threshold

        # Create kernel
        y_indices, x_indices = torch.meshgrid(torch.arange(0., kernel_size), torch.arange(0., kernel_size))
        dist = torch.sqrt((x_indices - r) ** 2 + (y_indices - r) ** 2)
        kernel = dist.max() - dist
        kernel /= kernel.sum()
        kernel = kernel.view(1, 1, *kernel.shape)
        self.register_buffer('weight', kernel)

    def forward(self, x):
        x = x.float()
        for i in range(self.iterations - 1):
            x = torch.min(x, F.conv2d(x, weight=self.weight, groups=x.shape[1], padding=self.padding))
        x = F.conv2d(x, weight=self.weight, groups=x.shape[1], padding=self.padding)

        mask = x >= self.threshold
        x[mask] = 1.0
        x[~mask] /= x[~mask].max()

        return x, mask


vgg_mean = torch.tensor([[[0.485]], [[0.456]], [[0.406]]],
                        requires_grad=False, device=torch.device(0))
vgg_std = torch.tensor([[[0.229]], [[0.224]], [[0.225]]],
                       requires_grad=False, device=torch.device(0))
def load_bisenet():
    bisenet_model = BiSeNet(n_classes=19)
    bisenet_model.load_state_dict(
        torch.load(make_abs_path("/gavin/datasets/hanbang/79999_iter.pth",), map_location="cpu")
    )
    bisenet_model.eval()
    bisenet_model = bisenet_model.cuda(0)

    smooth_mask = SoftErosion(kernel_size=17, threshold=0.9, iterations=7).cuda()
    print('[Global] bisenet loaded.')
    return bisenet_model, smooth_mask

global_bisenet, global_smooth_mask = load_bisenet()
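For context, here is a minimal usage sketch (not part of the commit) of the new `Trick` helper, which `app.py` now imports. The tensor shapes and the [-1, 1] value range are assumptions inferred from `tensor_to_arr`/`arr_to_tensor`, and the variable names `i_s`, `i_t`, `i_r` follow the ones used in `finetune_mouth`; the random tensors below are stand-ins for real aligned face images.

```python
# Hypothetical usage sketch for the Trick helper above (assumes a CUDA device
# and the checkpoints referenced in tricks.py are available).
import torch
from inference.tricks import Trick

trick = Trick()  # GPEN and the mouth helper are loaded lazily on first use

# i_s / i_t / i_r: source, target and raw swapped-face tensors,
# assumed shape (B, 3, H, W) with values in [-1, 1].
i_s = torch.randn(1, 3, 256, 256).cuda()
i_t = torch.randn(1, 3, 256, 256).cuda()
i_r = torch.randn(1, 3, 256, 256).cuda()

i_r = trick.finetune_mouth(i_s, i_t, i_r)          # blend the mouth region from the helper model
face_mask = trick.get_any_mask(i_r, par=[1, 10])   # e.g. skin + nose classes from BiSeNet parsing

out_np = trick.tensor_to_arr(i_r)[0]               # (H, W, 3) uint8 image
out_np = trick.gpen(out_np, use_gpen=True)         # GPEN restoration on the uint8 image
```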
third_party/GPEN/.gitignore
ADDED
@@ -0,0 +1,137 @@
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
pip-wheel-metadata/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/

# Translations
*.mo
*.pot

# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
target/

# Jupyter Notebook
.ipynb_checkpoints

# IPython
profile_default/
ipython_config.py

# pyenv
.python-version

# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock

# PEP 582; used by e.g. github.com/David-OConnor/pyflow
__pypackages__/

# Celery stuff
celerybeat-schedule
celerybeat.pid

# SageMath parsed files
*.sage.py

# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/

# Spyder project settings
.spyderproject
.spyproject

# Rope project settings
.ropeproject

# mkdocs documentation
/site

# mypy
.mypy_cache/
.dmypy.json
dmypy.json

# Pyre type checker
.pyre/

results


weights/*.pth
val
tmp
figs
third_party/GPEN/.idea/.gitignore
ADDED
@@ -0,0 +1,8 @@
# Default ignored files
/shelf/
/workspace.xml
# Editor-based HTTP Client requests
/httpRequests/
# Datasource local storage ignored files
/dataSources/
/dataSources.local.xml
third_party/GPEN/.idea/deployment.xml
ADDED
@@ -0,0 +1,15 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="PublishConfigData" remoteFilesAllowedToDisappearOnAutoupload="false" confirmBeforeUploading="false">
    <option name="confirmBeforeUploading" value="false" />
    <serverData>
      <paths name="GPU-root@9.134.229.28:36000">
        <serverdata>
          <mappings>
            <mapping deploy="/gavin/code/GPEN" local="$PROJECT_DIR$" web="/" />
          </mappings>
        </serverdata>
      </paths>
    </serverData>
  </component>
</project>
third_party/GPEN/.idea/inspectionProfiles/profiles_settings.xml
ADDED
@@ -0,0 +1,6 @@
<component name="InspectionProjectProfileManager">
  <settings>
    <option name="USE_PROJECT_PROFILE" value="false" />
    <version value="1.0" />
  </settings>
</component>
third_party/GPEN/.idea/modules.xml
ADDED
@@ -0,0 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="ProjectModuleManager">
    <modules>
      <module fileurl="file://$PROJECT_DIR$/.idea/GPEN.iml" filepath="$PROJECT_DIR$/.idea/GPEN.iml" />
    </modules>
  </component>
</project>
third_party/GPEN/GPEN_inference.ipynb
ADDED
The diff for this file is too large to render. See the raw diff.
third_party/GPEN/README.md
ADDED
@@ -0,0 +1,122 @@
# Notes
- Make sure ffmpeg is installed: `yum install ffmpeg -y`
- Download the weights
- Or simply run `source setup_env.sh`
### Infer vid
- `python3 infer_video.py --indir <input videos> --outdir <output location (creates at most one new folder)>`
- Submitting a job: `source setup_env.sh && python3 infer_video.py xxx `


# GAN Prior Embedded Network for Blind Face Restoration in the Wild

[Paper](https://arxiv.org/abs/2105.06070) | [Supplementary](https://www4.comp.polyu.edu.hk/~cslzhang/paper/GPEN-cvpr21-supp.pdf) | [Demo](https://vision.aliyun.com/experience/detail?spm=a211p3.14020179.J_7524944390.17.66cd4850wVDkUQ&tagName=facebody&children=EnhanceFace)

<a href="https://replicate.ai/yangxy/gpen"><img src="https://img.shields.io/static/v1?label=Replicate&message=Demo and Docker Image&color=blue"></a> [](https://huggingface.co/spaces/akhaliq/GPEN)

[Tao Yang](https://cg.cs.tsinghua.edu.cn/people/~tyang)<sup>1</sup>, Peiran Ren<sup>1</sup>, Xuansong Xie<sup>1</sup>, [Lei Zhang](https://www4.comp.polyu.edu.hk/~cslzhang)<sup>1,2</sup>
_<sup>1</sup>[DAMO Academy, Alibaba Group](https://damo.alibaba.com), Hangzhou, China_
_<sup>2</sup>[Department of Computing, The Hong Kong Polytechnic University](http://www.comp.polyu.edu.hk), Hong Kong, China_

#### Face Restoration

<img src="figs/real_00.png" width="390px"/> <img src="figs/real_01.png" width="390px"/>
<img src="figs/real_02.png" width="390px"/> <img src="figs/real_03.png" width="390px"/>

<img src="figs/Solvay_conference_1927_comp.jpg" width="784px"/>

#### Face Colorization

<img src="figs/colorization_00.jpg" width="390px"/> <img src="figs/colorization_01.jpg" width="390px"/>

#### Face Inpainting

<img src="figs/inpainting_00.jpg" width="390px"/> <img src="figs/inpainting_01.jpg" width="390px"/>

#### Conditional Image Synthesis (Seg2Face)

<img src="figs/seg2face_00.jpg" width="390px"/> <img src="figs/seg2face_01.jpg" width="390px"/>

## News
(2021-12-29) Add online demos <a href="https://replicate.ai/yangxy/gpen"><img src="https://img.shields.io/static/v1?label=Replicate&message=Demo and Docker Image&color=blue"></a> [](https://huggingface.co/spaces/akhaliq/GPEN). Many thanks to [CJWBW](https://github.com/CJWBW) and [AK391](https://github.com/AK391).

(2021-12-16) More models will be released, including one-to-many FSRs. Stay tuned.

(2021-12-16) Release a simplified training code of GPEN. It differs from our implementation in the paper, but could achieve comparable performance. We strongly recommend changing the degradation model.

(2021-12-09) Add face parsing to better paste restored faces back.

(2021-12-09) GPEN can run on CPU now by simply discarding ``--use_cuda``.

(2021-12-01) GPEN can now work on a Windows machine without compiling cuda codes. Please check it out. Thanks to [Animadversio](https://github.com/rosinality/stylegan2-pytorch/issues/81). Alternatively, you can try [GPEN-Windows](https://drive.google.com/file/d/1YJJVnPGq90e_mWZxSGGTptNQilZNfOEO/view?usp=drivesdk). Many thanks to [Cioscos](https://github.com/yangxy/GPEN/issues/74).

(2021-10-22) GPEN can now work with SR methods. A SR model trained by myself is provided. Replace it with your own model if necessary.

(2021-10-11) The Colab demo for GPEN is available now <a href="https://colab.research.google.com/drive/1fPUsJCpQipp2Z5B5GbEXqpBGsMp-nvjm?usp=sharing"><img src="https://colab.research.google.com/assets/colab-badge.svg" alt="google colab logo"></a>.

## Usage

(five requirement badge images; image URLs not rendered in this view)

- Clone this repository:
```bash
git clone https://github.com/yangxy/GPEN.git
cd GPEN
```
- Download RetinaFace model and our pre-trained model (not our best model due to commercial issues) and put them into ``weights/``.

[RetinaFace-R50](https://public-vigen-video.oss-cn-shanghai.aliyuncs.com/robin/models/RetinaFace-R50.pth?OSSAccessKeyId=LTAI4G6bfnyW4TA4wFUXTYBe&Expires=1961116085&Signature=GlUNW6%2B8FxvxWmE9jKIZYOOciKQ%3D) | [ParseNet-latest](https://public-vigen-video.oss-cn-shanghai.aliyuncs.com/robin/models/ParseNet-latest.pth?OSSAccessKeyId=LTAI4G6bfnyW4TA4wFUXTYBe&Expires=1961116134&Signature=bnMwU1JogmNbARto6G%2B7iaJQCHs%3D) | [model_ir_se50](https://public-vigen-video.oss-cn-shanghai.aliyuncs.com/robin/models/model_ir_se50.pth?OSSAccessKeyId=LTAI4G6bfnyW4TA4wFUXTYBe&Expires=1961116170&Signature=jEyBslytwpWoh5DfKvYe2H31GgE%3D) | [GPEN-BFR-512](https://public-vigen-video.oss-cn-shanghai.aliyuncs.com/robin/models/GPEN-BFR-512.pth?OSSAccessKeyId=LTAI4G6bfnyW4TA4wFUXTYBe&Expires=1961116208&Signature=hBgvVvKVSNGeXqT8glG%2Bd2t2OKc%3D) | [GPEN-BFR-512-D](https://public-vigen-video.oss-cn-shanghai.aliyuncs.com/robin/models/GPEN-BFR-512-D.pth?OSSAccessKeyId=LTAI4G6bfnyW4TA4wFUXTYBe&Expires=1961116234&Signature=mP7MvYhKjbsIM2lhmuaEysssWpc%3D) | [GPEN-BFR-256](https://public-vigen-video.oss-cn-shanghai.aliyuncs.com/robin/models/GPEN-BFR-256.pth?OSSAccessKeyId=LTAI4G6bfnyW4TA4wFUXTYBe&Expires=1961116259&Signature=kMGJLSHqnvzzzqwtjUVBgngzX2s%3D) | [GPEN-BFR-256-D](https://public-vigen-video.oss-cn-shanghai.aliyuncs.com/robin/models/GPEN-BFR-256-D.pth?OSSAccessKeyId=LTAI4G6bfnyW4TA4wFUXTYBe&Expires=1961116288&Signature=b7NCfHFzyqKh%2BfaLrRCwMIIZ2HA%3D) | [GPEN-Colorization-1024](https://public-vigen-video.oss-cn-shanghai.aliyuncs.com/robin/models/GPEN-Colorization-1024.pth?OSSAccessKeyId=LTAI4G6bfnyW4TA4wFUXTYBe&Expires=1961116315&Signature=9tPavW2h%2F1LhIKiXj73sTQoWqcc%3D) | [GPEN-Inpainting-1024](https://public-vigen-video.oss-cn-shanghai.aliyuncs.com/robin/models/GPEN-Inpainting-1024.pth?OSSAccessKeyId=LTAI4G6bfnyW4TA4wFUXTYBe&Expires=1961116338&Signature=tvYhdLaLgW7UdcUrApXp2jsek8w%3D) | [GPEN-Seg2face-512](https://public-vigen-video.oss-cn-shanghai.aliyuncs.com/robin/models/GPEN-Seg2face-512.pth?OSSAccessKeyId=LTAI4G6bfnyW4TA4wFUXTYBe&Expires=1961116362&Signature=VOaHmjFy5YVBjMoNTpVk2KDJx9k%3D) | [rrdb_realesrnet_psnr](https://public-vigen-video.oss-cn-shanghai.aliyuncs.com/robin/models/rrdb_realesrnet_psnr.pth?OSSAccessKeyId=LTAI4G6bfnyW4TA4wFUXTYBe&Expires=1961116389&Signature=JZIBJOtfE5ePUyETslpDQsYwHpU%3D)

- Restore face images:
```bash
python face_enhancement.py --model GPEN-BFR-512 --size 512 --channel_multiplier 2 --narrow 1 --use_sr --use_cuda --indir examples/imgs --outdir examples/outs-BFR
```

- Colorize faces:
```bash
python face_colorization.py
```

- Complete faces:
```bash
python face_inpainting.py
```

- Synthesize faces:
```bash
python segmentation2face.py
```

- Train GPEN for BFR with 4 GPUs:
```bash
CUDA_VISIBLE_DEVICES='0,1,2,3' python -m torch.distributed.launch --nproc_per_node=4 --master_port=4321 train_simple.py --size 1024 --channel_multiplier 2 --narrow 1 --ckpt weights --sample results --batch 2 --path your_path_of_croped+aligned_hq_faces (e.g., FFHQ)
```
When testing your own model, set ``--key g_ema``.

## Main idea
<img src="figs/architecture.png" width="784px"/>

## Citation
If our work is useful for your research, please consider citing:

    @inproceedings{Yang2021GPEN,
        title={GAN Prior Embedded Network for Blind Face Restoration in the Wild},
        author={Tao Yang, Peiran Ren, Xuansong Xie, and Lei Zhang},
        booktitle={IEEE Conference on Computer Vision and Pattern Recognition (CVPR)},
        year={2021}
    }

## License
© Alibaba, 2021. For academic and non-commercial use only.

## Acknowledgments
We borrow some codes from [Pytorch_Retinaface](https://github.com/biubug6/Pytorch_Retinaface), [stylegan2-pytorch](https://github.com/rosinality/stylegan2-pytorch), [Real-ESRGAN](https://github.com/xinntao/Real-ESRGAN), and [GFPGAN](https://github.com/TencentARC/GFPGAN).

## Contact
If you have any questions or suggestions about this paper, feel free to reach me at yangtao9009@gmail.com.
third_party/GPEN/__init_paths.py
ADDED
@@ -0,0 +1,33 @@
'''
@paper: GAN Prior Embedded Network for Blind Face Restoration in the Wild (CVPR2021)
@author: yangxy (yangtao9009@gmail.com)
'''
import os.path as osp
import sys

def add_path(path):
    if path not in sys.path:
        sys.path.insert(0, path)

this_dir = osp.dirname(__file__)

path = osp.join(this_dir, 'face_detect')
add_path(path)

path = osp.join(this_dir, 'face_parse')
add_path(path)

path = osp.join(this_dir, 'face_model')
add_path(path)

path = osp.join(this_dir, 'sr_model')
add_path(path)

path = osp.join(this_dir, 'training')
add_path(path)

path = osp.join(this_dir, 'training/loss')
add_path(path)

path = osp.join(this_dir, 'training/data_loader')
add_path(path)
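`__init_paths.py` works purely through its import side effect: it prepends the GPEN sub-directories to `sys.path` so their modules resolve without a package prefix. A small sketch of that effect (the direct `face_gan` import form is an assumption based on the added paths; the prefixed form is the one actually used by `face_colorization.py` further below):

```python
# Sketch of the side effect of importing __init_paths from a script in the GPEN root.
import sys
import __init_paths  # inserts face_detect/, face_parse/, face_model/, sr_model/, training/... into sys.path

# Modules inside those directories are now importable without a package prefix,
# e.g. face_gan.py lives in face_model/:
from face_gan import FaceGAN              # hypothetical: resolved via the added face_model/ entry
from face_model.face_gan import FaceGAN   # the form used by face_colorization.py below
```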
third_party/GPEN/align_faces.py
ADDED
@@ -0,0 +1,266 @@
# -*- coding: utf-8 -*-
"""
Created on Mon Apr 24 15:43:29 2017
@author: zhaoy
"""
"""
@Modified by yangxy (yangtao9009@gmail.com)
"""
import cv2
import numpy as np
from skimage import transform as trans

# reference facial points, a list of coordinates (x,y)
REFERENCE_FACIAL_POINTS = [
    [30.29459953, 51.69630051],
    [65.53179932, 51.50139999],
    [48.02519989, 71.73660278],
    [33.54930115, 92.3655014],
    [62.72990036, 92.20410156]
]

DEFAULT_CROP_SIZE = (96, 112)


def _umeyama(src, dst, estimate_scale=True, scale=1.0):
    """Estimate N-D similarity transformation with or without scaling.
    Parameters
    ----------
    src : (M, N) array
        Source coordinates.
    dst : (M, N) array
        Destination coordinates.
    estimate_scale : bool
        Whether to estimate scaling factor.
    Returns
    -------
    T : (N + 1, N + 1)
        The homogeneous similarity transformation matrix. The matrix contains
        NaN values only if the problem is not well-conditioned.
    References
    ----------
    .. [1] "Least-squares estimation of transformation parameters between two
           point patterns", Shinji Umeyama, PAMI 1991, :DOI:`10.1109/34.88573`
    """

    num = src.shape[0]
    dim = src.shape[1]

    # Compute mean of src and dst.
    src_mean = src.mean(axis=0)
    dst_mean = dst.mean(axis=0)

    # Subtract mean from src and dst.
    src_demean = src - src_mean
    dst_demean = dst - dst_mean

    # Eq. (38).
    A = dst_demean.T @ src_demean / num

    # Eq. (39).
    d = np.ones((dim,), dtype=np.double)
    if np.linalg.det(A) < 0:
        d[dim - 1] = -1

    T = np.eye(dim + 1, dtype=np.double)

    U, S, V = np.linalg.svd(A)

    # Eq. (40) and (43).
    rank = np.linalg.matrix_rank(A)
    if rank == 0:
        return np.nan * T
    elif rank == dim - 1:
        if np.linalg.det(U) * np.linalg.det(V) > 0:
            T[:dim, :dim] = U @ V
        else:
            s = d[dim - 1]
            d[dim - 1] = -1
            T[:dim, :dim] = U @ np.diag(d) @ V
            d[dim - 1] = s
    else:
        T[:dim, :dim] = U @ np.diag(d) @ V

    if estimate_scale:
        # Eq. (41) and (42).
        scale = 1.0 / src_demean.var(axis=0).sum() * (S @ d)
    else:
        scale = scale

    T[:dim, dim] = dst_mean - scale * (T[:dim, :dim] @ src_mean.T)
    T[:dim, :dim] *= scale

    return T, scale


class FaceWarpException(Exception):
    def __str__(self):
        return 'In File {}:{}'.format(
            __file__, super.__str__(self))


def get_reference_facial_points(output_size=None,
                                inner_padding_factor=0.0,
                                outer_padding=(0, 0),
                                default_square=False):
    tmp_5pts = np.array(REFERENCE_FACIAL_POINTS)
    tmp_crop_size = np.array(DEFAULT_CROP_SIZE)

    # 0) make the inner region a square
    if default_square:
        size_diff = max(tmp_crop_size) - tmp_crop_size
        tmp_5pts += size_diff / 2
        tmp_crop_size += size_diff

    if (output_size and
            output_size[0] == tmp_crop_size[0] and
            output_size[1] == tmp_crop_size[1]):
        print('output_size == DEFAULT_CROP_SIZE {}: return default reference points'.format(tmp_crop_size))
        return tmp_5pts

    if (inner_padding_factor == 0 and
            outer_padding == (0, 0)):
        if output_size is None:
            print('No paddings to do: return default reference points')
            return tmp_5pts
        else:
            raise FaceWarpException(
                'No paddings to do, output_size must be None or {}'.format(tmp_crop_size))

    # check output size
    if not (0 <= inner_padding_factor <= 1.0):
        raise FaceWarpException('Not (0 <= inner_padding_factor <= 1.0)')

    if ((inner_padding_factor > 0 or outer_padding[0] > 0 or outer_padding[1] > 0)
            and output_size is None):
        output_size = tmp_crop_size * \
                      (1 + inner_padding_factor * 2).astype(np.int32)
        output_size += np.array(outer_padding)
        print('              deduced from paddings, output_size = ', output_size)

    if not (outer_padding[0] < output_size[0]
            and outer_padding[1] < output_size[1]):
        raise FaceWarpException('Not (outer_padding[0] < output_size[0]'
                                'and outer_padding[1] < output_size[1])')

    # 1) pad the inner region according inner_padding_factor
    # print('---> STEP1: pad the inner region according inner_padding_factor')
    if inner_padding_factor > 0:
        size_diff = tmp_crop_size * inner_padding_factor * 2
        tmp_5pts += size_diff / 2
        tmp_crop_size += np.round(size_diff).astype(np.int32)

    # print('              crop_size = ', tmp_crop_size)
    # print('              reference_5pts = ', tmp_5pts)

    # 2) resize the padded inner region
    # print('---> STEP2: resize the padded inner region')
    size_bf_outer_pad = np.array(output_size) - np.array(outer_padding) * 2
    # print('              crop_size = ', tmp_crop_size)
    # print('              size_bf_outer_pad = ', size_bf_outer_pad)

    if size_bf_outer_pad[0] * tmp_crop_size[1] != size_bf_outer_pad[1] * tmp_crop_size[0]:
        raise FaceWarpException('Must have (output_size - outer_padding)'
                                '= some_scale * (crop_size * (1.0 + inner_padding_factor)')

    scale_factor = size_bf_outer_pad[0].astype(np.float32) / tmp_crop_size[0]
    # print('              resize scale_factor = ', scale_factor)
    tmp_5pts = tmp_5pts * scale_factor
    # size_diff = tmp_crop_size * (scale_factor - min(scale_factor))
    # tmp_5pts = tmp_5pts + size_diff / 2
    tmp_crop_size = size_bf_outer_pad
    # print('              crop_size = ', tmp_crop_size)
    # print('              reference_5pts = ', tmp_5pts)

    # 3) add outer_padding to make output_size
    reference_5point = tmp_5pts + np.array(outer_padding)
    tmp_crop_size = output_size
    # print('---> STEP3: add outer_padding to make output_size')
    # print('              crop_size = ', tmp_crop_size)
    # print('              reference_5pts = ', tmp_5pts)
    #
    # print('===> end get_reference_facial_points\n')

    return reference_5point


def get_affine_transform_matrix(src_pts, dst_pts):
    tfm = np.float32([[1, 0, 0], [0, 1, 0]])
    n_pts = src_pts.shape[0]
    ones = np.ones((n_pts, 1), src_pts.dtype)
    src_pts_ = np.hstack([src_pts, ones])
    dst_pts_ = np.hstack([dst_pts, ones])

    A, res, rank, s = np.linalg.lstsq(src_pts_, dst_pts_)

    if rank == 3:
        tfm = np.float32([
            [A[0, 0], A[1, 0], A[2, 0]],
            [A[0, 1], A[1, 1], A[2, 1]]
        ])
    elif rank == 2:
        tfm = np.float32([
            [A[0, 0], A[1, 0], 0],
            [A[0, 1], A[1, 1], 0]
        ])

    return tfm


def warp_and_crop_face(src_img,
                       facial_pts,
                       reference_pts=None,
                       crop_size=(96, 112),
                       align_type='smilarity'):  # smilarity cv2_affine affine
    if reference_pts is None:
        if crop_size[0] == 96 and crop_size[1] == 112:
            reference_pts = REFERENCE_FACIAL_POINTS
        else:
            default_square = False
            inner_padding_factor = 0
            outer_padding = (0, 0)
            output_size = crop_size

            reference_pts = get_reference_facial_points(output_size,
                                                        inner_padding_factor,
                                                        outer_padding,
                                                        default_square)

    ref_pts = np.float32(reference_pts)
    ref_pts_shp = ref_pts.shape
    if max(ref_pts_shp) < 3 or min(ref_pts_shp) != 2:
        raise FaceWarpException(
            'reference_pts.shape must be (K,2) or (2,K) and K>2')

    if ref_pts_shp[0] == 2:
        ref_pts = ref_pts.T

    src_pts = np.float32(facial_pts)
    src_pts_shp = src_pts.shape
    if max(src_pts_shp) < 3 or min(src_pts_shp) != 2:
        raise FaceWarpException(
            'facial_pts.shape must be (K,2) or (2,K) and K>2')

    if src_pts_shp[0] == 2:
        src_pts = src_pts.T

    if src_pts.shape != ref_pts.shape:
        raise FaceWarpException(
            'facial_pts and reference_pts must have the same shape')

    if align_type == 'cv2_affine':
        tfm = cv2.getAffineTransform(src_pts[0:3], ref_pts[0:3])
        tfm_inv = cv2.getAffineTransform(ref_pts[0:3], src_pts[0:3])
    elif align_type == 'affine':
        tfm = get_affine_transform_matrix(src_pts, ref_pts)
        tfm_inv = get_affine_transform_matrix(ref_pts, src_pts)
    else:
        params, scale = _umeyama(src_pts, ref_pts)
        tfm = params[:2, :]

        params, _ = _umeyama(ref_pts, src_pts, False, scale=1.0/scale)
        tfm_inv = params[:2, :]

    face_img = cv2.warpAffine(src_img, tfm, (crop_size[0], crop_size[1]), flags=3)

    return face_img, tfm_inv
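A hedged usage sketch of the alignment helpers above: align a face from five detected landmarks and keep the inverse transform for pasting the restored crop back. The frame, landmark coordinates, crop size, and padding values are illustrative assumptions, not values taken from this commit.

```python
# Hypothetical usage of warp_and_crop_face: align a face to a 256x256 crop.
import cv2
import numpy as np
from align_faces import get_reference_facial_points, warp_and_crop_face

img = np.zeros((720, 1280, 3), dtype=np.uint8)            # stand-in for a BGR video frame
facial5points = np.array([[420., 300.], [520., 305.],     # made-up 5-point landmarks:
                          [470., 370.],                   # eyes, nose tip,
                          [430., 430.], [515., 435.]],    # mouth corners
                         dtype=np.float32)

crop_size = (256, 256)
reference_5pts = get_reference_facial_points(
    output_size=crop_size, inner_padding_factor=0.25,
    outer_padding=(0, 0), default_square=True)

aligned, tfm_inv = warp_and_crop_face(img, facial5points,
                                      reference_pts=reference_5pts,
                                      crop_size=crop_size)

# tfm_inv maps the aligned crop back to frame coordinates, e.g. after restoration:
restored_back = cv2.warpAffine(aligned, tfm_inv, (img.shape[1], img.shape[0]))
```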
third_party/GPEN/distributed.py
ADDED
@@ -0,0 +1,126 @@
import math
import pickle

import torch
from torch import distributed as dist
from torch.utils.data.sampler import Sampler


def get_rank():
    if not dist.is_available():
        return 0

    if not dist.is_initialized():
        return 0

    return dist.get_rank()


def synchronize():
    if not dist.is_available():
        return

    if not dist.is_initialized():
        return

    world_size = dist.get_world_size()

    if world_size == 1:
        return

    dist.barrier()


def get_world_size():
    if not dist.is_available():
        return 1

    if not dist.is_initialized():
        return 1

    return dist.get_world_size()


def reduce_sum(tensor):
    if not dist.is_available():
        return tensor

    if not dist.is_initialized():
        return tensor

    tensor = tensor.clone()
    dist.all_reduce(tensor, op=dist.ReduceOp.SUM)

    return tensor


def gather_grad(params):
    world_size = get_world_size()

    if world_size == 1:
        return

    for param in params:
        if param.grad is not None:
            dist.all_reduce(param.grad.data, op=dist.ReduceOp.SUM)
            param.grad.data.div_(world_size)


def all_gather(data):
    world_size = get_world_size()

    if world_size == 1:
        return [data]

    buffer = pickle.dumps(data)
    storage = torch.ByteStorage.from_buffer(buffer)
    tensor = torch.ByteTensor(storage).to('cuda')

    local_size = torch.IntTensor([tensor.numel()]).to('cuda')
    size_list = [torch.IntTensor([0]).to('cuda') for _ in range(world_size)]
    dist.all_gather(size_list, local_size)
    size_list = [int(size.item()) for size in size_list]
    max_size = max(size_list)

    tensor_list = []
    for _ in size_list:
        tensor_list.append(torch.ByteTensor(size=(max_size,)).to('cuda'))

    if local_size != max_size:
        padding = torch.ByteTensor(size=(max_size - local_size,)).to('cuda')
        tensor = torch.cat((tensor, padding), 0)

    dist.all_gather(tensor_list, tensor)

    data_list = []

    for size, tensor in zip(size_list, tensor_list):
        buffer = tensor.cpu().numpy().tobytes()[:size]
        data_list.append(pickle.loads(buffer))

    return data_list


def reduce_loss_dict(loss_dict):
    world_size = get_world_size()

    if world_size < 2:
        return loss_dict

    with torch.no_grad():
        keys = []
        losses = []

        for k in sorted(loss_dict.keys()):
            keys.append(k)
            losses.append(loss_dict[k])

        losses = torch.stack(losses, 0)
        dist.reduce(losses, dst=0)

        if dist.get_rank() == 0:
            losses /= world_size

        reduced_losses = {k: v for k, v in zip(keys, losses)}

    return reduced_losses
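An illustrative sketch of how these helpers are typically driven from a multi-GPU training loop (this mirrors the stylegan2-pytorch pattern the file derives from; the loss names and metric values below are placeholders, not values from this commit):

```python
# Assumed usage of the distributed helpers above inside a training/eval loop.
import torch
from distributed import get_rank, synchronize, reduce_loss_dict, all_gather

loss_dict = {"d": torch.tensor(0.7, device="cuda"),
             "g": torch.tensor(1.3, device="cuda")}

loss_reduced = reduce_loss_dict(loss_dict)       # on rank 0: losses averaged over world_size
if get_rank() == 0:
    print({k: v.item() for k, v in loss_reduced.items()})

local_metrics = {"rank": get_rank(), "psnr": 27.4}   # any picklable per-process object
all_metrics = all_gather(local_metrics)              # list with one entry per process
synchronize()                                        # barrier before the next step/epoch
```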
third_party/GPEN/face_colorization.py
ADDED
@@ -0,0 +1,48 @@
'''
@paper: GAN Prior Embedded Network for Blind Face Restoration in the Wild (CVPR2021)
@author: yangxy (yangtao9009@gmail.com)
'''
import os
import cv2
import glob
import time
import numpy as np
from PIL import Image
import __init_paths
from face_model.face_gan import FaceGAN

class FaceColorization(object):
    def __init__(self, base_dir='./', size=1024, model=None, channel_multiplier=2):
        self.facegan = FaceGAN(base_dir, size, model, channel_multiplier)

    # make sure the face image is well aligned. Please refer to face_enhancement.py
    def process(self, gray):
        # colorize the face
        out = self.facegan.process(gray)

        return out


if __name__=='__main__':
    model = {'name':'GPEN-Colorization-1024', 'size':1024}

    indir = 'examples/grays'
    outdir = 'examples/outs-colorization'
    os.makedirs(outdir, exist_ok=True)

    facecolorizer = FaceColorization(size=model['size'], model=model['name'], channel_multiplier=2)

    files = sorted(glob.glob(os.path.join(indir, '*.*g')))
    for n, file in enumerate(files[:]):
        filename = os.path.basename(file)

        grayf = cv2.imread(file, cv2.IMREAD_GRAYSCALE)
        grayf = cv2.cvtColor(grayf, cv2.COLOR_GRAY2BGR) # channel: 1->3

        colorf = facecolorizer.process(grayf)

        grayf = cv2.resize(grayf, colorf.shape[:2])
        cv2.imwrite(os.path.join(outdir, '.'.join(filename.split('.')[:-1])+'.jpg'), np.hstack((grayf, colorf)))

        if n%10==0: print(n, file)
third_party/GPEN/face_detect/data/FDDB/img_list.txt
ADDED
@@ -0,0 +1,2845 @@
(2,845 image-path entries; file contents not rendered in this view)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
third_party/GPEN/face_detect/data/FDDB/img_list.txt — FDDB image list added in this commit, one image path per diff line, e.g.:
+2002/08/11/big/img_591
+2002/08/26/big/img_265
+2002/07/19/big/img_423
+2002/08/24/big/img_490
+2002/08/31/big/img_17676
(the remaining entries continue in the same one-path-per-line format)
+
2002/08/21/big/img_296
|
| 1486 |
+
2002/09/01/big/img_16755
|
| 1487 |
+
2002/08/05/big/img_3552
|
| 1488 |
+
2002/09/02/big/img_15823
|
| 1489 |
+
2003/01/14/big/img_193
|
| 1490 |
+
2002/07/21/big/img_159
|
| 1491 |
+
2002/08/02/big/img_564
|
| 1492 |
+
2002/08/16/big/img_300
|
| 1493 |
+
2002/07/19/big/img_269
|
| 1494 |
+
2002/08/13/big/img_676
|
| 1495 |
+
2002/07/28/big/img_57
|
| 1496 |
+
2002/08/05/big/img_3318
|
| 1497 |
+
2002/07/31/big/img_218
|
| 1498 |
+
2002/08/21/big/img_898
|
| 1499 |
+
2002/07/29/big/img_109
|
| 1500 |
+
2002/07/19/big/img_854
|
| 1501 |
+
2002/08/23/big/img_311
|
| 1502 |
+
2002/08/14/big/img_318
|
| 1503 |
+
2002/07/25/big/img_523
|
| 1504 |
+
2002/07/21/big/img_678
|
| 1505 |
+
2003/01/17/big/img_690
|
| 1506 |
+
2002/08/28/big/img_19503
|
| 1507 |
+
2002/08/18/big/img_251
|
| 1508 |
+
2002/08/22/big/img_672
|
| 1509 |
+
2002/08/20/big/img_663
|
| 1510 |
+
2002/08/02/big/img_148
|
| 1511 |
+
2002/09/02/big/img_15580
|
| 1512 |
+
2002/07/25/big/img_778
|
| 1513 |
+
2002/08/14/big/img_565
|
| 1514 |
+
2002/08/12/big/img_374
|
| 1515 |
+
2002/08/13/big/img_1018
|
| 1516 |
+
2002/08/20/big/img_474
|
| 1517 |
+
2002/08/25/big/img_33
|
| 1518 |
+
2002/08/02/big/img_1190
|
| 1519 |
+
2002/08/08/big/img_864
|
| 1520 |
+
2002/08/14/big/img_1071
|
| 1521 |
+
2002/08/30/big/img_18103
|
| 1522 |
+
2002/08/18/big/img_533
|
| 1523 |
+
2003/01/16/big/img_650
|
| 1524 |
+
2002/07/25/big/img_108
|
| 1525 |
+
2002/07/26/big/img_81
|
| 1526 |
+
2002/07/27/big/img_543
|
| 1527 |
+
2002/07/29/big/img_521
|
| 1528 |
+
2003/01/13/big/img_434
|
| 1529 |
+
2002/08/26/big/img_674
|
| 1530 |
+
2002/08/06/big/img_2932
|
| 1531 |
+
2002/08/07/big/img_1262
|
| 1532 |
+
2003/01/15/big/img_201
|
| 1533 |
+
2003/01/16/big/img_673
|
| 1534 |
+
2002/09/02/big/img_15988
|
| 1535 |
+
2002/07/29/big/img_1306
|
| 1536 |
+
2003/01/14/big/img_1072
|
| 1537 |
+
2002/08/30/big/img_18232
|
| 1538 |
+
2002/08/05/big/img_3711
|
| 1539 |
+
2002/07/23/big/img_775
|
| 1540 |
+
2002/08/01/big/img_16
|
| 1541 |
+
2003/01/16/big/img_630
|
| 1542 |
+
2002/08/22/big/img_695
|
| 1543 |
+
2002/08/14/big/img_51
|
| 1544 |
+
2002/08/14/big/img_782
|
| 1545 |
+
2002/08/24/big/img_742
|
| 1546 |
+
2003/01/14/big/img_512
|
| 1547 |
+
2003/01/15/big/img_1183
|
| 1548 |
+
2003/01/15/big/img_714
|
| 1549 |
+
2002/08/01/big/img_2078
|
| 1550 |
+
2002/07/31/big/img_682
|
| 1551 |
+
2002/09/02/big/img_15687
|
| 1552 |
+
2002/07/26/big/img_518
|
| 1553 |
+
2002/08/27/big/img_19676
|
| 1554 |
+
2002/09/02/big/img_15969
|
| 1555 |
+
2002/08/02/big/img_931
|
| 1556 |
+
2002/08/25/big/img_508
|
| 1557 |
+
2002/08/29/big/img_18616
|
| 1558 |
+
2002/07/22/big/img_839
|
| 1559 |
+
2002/07/28/big/img_313
|
| 1560 |
+
2003/01/14/big/img_155
|
| 1561 |
+
2002/08/02/big/img_1105
|
| 1562 |
+
2002/08/09/big/img_53
|
| 1563 |
+
2002/08/16/big/img_469
|
| 1564 |
+
2002/08/15/big/img_502
|
| 1565 |
+
2002/08/20/big/img_575
|
| 1566 |
+
2002/07/25/big/img_138
|
| 1567 |
+
2003/01/16/big/img_579
|
| 1568 |
+
2002/07/19/big/img_352
|
| 1569 |
+
2003/01/14/big/img_762
|
| 1570 |
+
2003/01/01/big/img_588
|
| 1571 |
+
2002/08/02/big/img_981
|
| 1572 |
+
2002/08/21/big/img_447
|
| 1573 |
+
2002/09/01/big/img_16151
|
| 1574 |
+
2003/01/14/big/img_769
|
| 1575 |
+
2002/08/23/big/img_461
|
| 1576 |
+
2002/08/17/big/img_240
|
| 1577 |
+
2002/09/02/big/img_15220
|
| 1578 |
+
2002/07/19/big/img_408
|
| 1579 |
+
2002/09/02/big/img_15496
|
| 1580 |
+
2002/07/29/big/img_758
|
| 1581 |
+
2002/08/28/big/img_19392
|
| 1582 |
+
2002/08/06/big/img_2723
|
| 1583 |
+
2002/08/31/big/img_17752
|
| 1584 |
+
2002/08/23/big/img_469
|
| 1585 |
+
2002/08/13/big/img_515
|
| 1586 |
+
2002/09/02/big/img_15551
|
| 1587 |
+
2002/08/03/big/img_462
|
| 1588 |
+
2002/07/24/big/img_613
|
| 1589 |
+
2002/07/22/big/img_61
|
| 1590 |
+
2002/08/08/big/img_171
|
| 1591 |
+
2002/08/21/big/img_177
|
| 1592 |
+
2003/01/14/big/img_105
|
| 1593 |
+
2002/08/02/big/img_1017
|
| 1594 |
+
2002/08/22/big/img_106
|
| 1595 |
+
2002/07/27/big/img_542
|
| 1596 |
+
2002/07/21/big/img_665
|
| 1597 |
+
2002/07/23/big/img_595
|
| 1598 |
+
2002/08/04/big/img_657
|
| 1599 |
+
2002/08/29/big/img_19002
|
| 1600 |
+
2003/01/15/big/img_550
|
| 1601 |
+
2002/08/14/big/img_662
|
| 1602 |
+
2002/07/20/big/img_425
|
| 1603 |
+
2002/08/30/big/img_18528
|
| 1604 |
+
2002/07/26/big/img_611
|
| 1605 |
+
2002/07/22/big/img_849
|
| 1606 |
+
2002/08/07/big/img_1655
|
| 1607 |
+
2002/08/21/big/img_638
|
| 1608 |
+
2003/01/17/big/img_732
|
| 1609 |
+
2003/01/01/big/img_496
|
| 1610 |
+
2002/08/18/big/img_713
|
| 1611 |
+
2002/08/08/big/img_109
|
| 1612 |
+
2002/07/27/big/img_1008
|
| 1613 |
+
2002/07/20/big/img_559
|
| 1614 |
+
2002/08/16/big/img_699
|
| 1615 |
+
2002/08/31/big/img_17702
|
| 1616 |
+
2002/07/31/big/img_1013
|
| 1617 |
+
2002/08/01/big/img_2027
|
| 1618 |
+
2002/08/02/big/img_1001
|
| 1619 |
+
2002/08/03/big/img_210
|
| 1620 |
+
2002/08/01/big/img_2087
|
| 1621 |
+
2003/01/14/big/img_199
|
| 1622 |
+
2002/07/29/big/img_48
|
| 1623 |
+
2002/07/19/big/img_727
|
| 1624 |
+
2002/08/09/big/img_249
|
| 1625 |
+
2002/08/04/big/img_632
|
| 1626 |
+
2002/08/22/big/img_620
|
| 1627 |
+
2003/01/01/big/img_457
|
| 1628 |
+
2002/08/05/big/img_3223
|
| 1629 |
+
2002/07/27/big/img_240
|
| 1630 |
+
2002/07/25/big/img_797
|
| 1631 |
+
2002/08/13/big/img_430
|
| 1632 |
+
2002/07/25/big/img_615
|
| 1633 |
+
2002/08/12/big/img_28
|
| 1634 |
+
2002/07/30/big/img_220
|
| 1635 |
+
2002/07/24/big/img_89
|
| 1636 |
+
2002/08/21/big/img_357
|
| 1637 |
+
2002/08/09/big/img_590
|
| 1638 |
+
2003/01/13/big/img_525
|
| 1639 |
+
2002/08/17/big/img_818
|
| 1640 |
+
2003/01/02/big/img_7
|
| 1641 |
+
2002/07/26/big/img_636
|
| 1642 |
+
2003/01/13/big/img_1122
|
| 1643 |
+
2002/07/23/big/img_810
|
| 1644 |
+
2002/08/20/big/img_888
|
| 1645 |
+
2002/07/27/big/img_3
|
| 1646 |
+
2002/08/15/big/img_451
|
| 1647 |
+
2002/09/02/big/img_15787
|
| 1648 |
+
2002/07/31/big/img_281
|
| 1649 |
+
2002/08/05/big/img_3274
|
| 1650 |
+
2002/08/07/big/img_1254
|
| 1651 |
+
2002/07/31/big/img_27
|
| 1652 |
+
2002/08/01/big/img_1366
|
| 1653 |
+
2002/07/30/big/img_182
|
| 1654 |
+
2002/08/27/big/img_19690
|
| 1655 |
+
2002/07/29/big/img_68
|
| 1656 |
+
2002/08/23/big/img_754
|
| 1657 |
+
2002/07/30/big/img_540
|
| 1658 |
+
2002/08/27/big/img_20063
|
| 1659 |
+
2002/08/14/big/img_471
|
| 1660 |
+
2002/08/02/big/img_615
|
| 1661 |
+
2002/07/30/big/img_186
|
| 1662 |
+
2002/08/25/big/img_150
|
| 1663 |
+
2002/07/27/big/img_626
|
| 1664 |
+
2002/07/20/big/img_225
|
| 1665 |
+
2003/01/15/big/img_1252
|
| 1666 |
+
2002/07/19/big/img_367
|
| 1667 |
+
2003/01/15/big/img_582
|
| 1668 |
+
2002/08/09/big/img_572
|
| 1669 |
+
2002/08/08/big/img_428
|
| 1670 |
+
2003/01/15/big/img_639
|
| 1671 |
+
2002/08/28/big/img_19245
|
| 1672 |
+
2002/07/24/big/img_321
|
| 1673 |
+
2002/08/02/big/img_662
|
| 1674 |
+
2002/08/08/big/img_1033
|
| 1675 |
+
2003/01/17/big/img_867
|
| 1676 |
+
2002/07/22/big/img_652
|
| 1677 |
+
2003/01/14/big/img_224
|
| 1678 |
+
2002/08/18/big/img_49
|
| 1679 |
+
2002/07/26/big/img_46
|
| 1680 |
+
2002/08/31/big/img_18021
|
| 1681 |
+
2002/07/25/big/img_151
|
| 1682 |
+
2002/08/23/big/img_540
|
| 1683 |
+
2002/08/25/big/img_693
|
| 1684 |
+
2002/07/23/big/img_340
|
| 1685 |
+
2002/07/28/big/img_117
|
| 1686 |
+
2002/09/02/big/img_15768
|
| 1687 |
+
2002/08/26/big/img_562
|
| 1688 |
+
2002/07/24/big/img_480
|
| 1689 |
+
2003/01/15/big/img_341
|
| 1690 |
+
2002/08/10/big/img_783
|
| 1691 |
+
2002/08/20/big/img_132
|
| 1692 |
+
2003/01/14/big/img_370
|
| 1693 |
+
2002/07/20/big/img_720
|
| 1694 |
+
2002/08/03/big/img_144
|
| 1695 |
+
2002/08/20/big/img_538
|
| 1696 |
+
2002/08/01/big/img_1745
|
| 1697 |
+
2002/08/11/big/img_683
|
| 1698 |
+
2002/08/03/big/img_328
|
| 1699 |
+
2002/08/10/big/img_793
|
| 1700 |
+
2002/08/14/big/img_689
|
| 1701 |
+
2002/08/02/big/img_162
|
| 1702 |
+
2003/01/17/big/img_411
|
| 1703 |
+
2002/07/31/big/img_361
|
| 1704 |
+
2002/08/15/big/img_289
|
| 1705 |
+
2002/08/08/big/img_254
|
| 1706 |
+
2002/08/15/big/img_996
|
| 1707 |
+
2002/08/20/big/img_785
|
| 1708 |
+
2002/07/24/big/img_511
|
| 1709 |
+
2002/08/06/big/img_2614
|
| 1710 |
+
2002/08/29/big/img_18733
|
| 1711 |
+
2002/08/17/big/img_78
|
| 1712 |
+
2002/07/30/big/img_378
|
| 1713 |
+
2002/08/31/big/img_17947
|
| 1714 |
+
2002/08/26/big/img_88
|
| 1715 |
+
2002/07/30/big/img_558
|
| 1716 |
+
2002/08/02/big/img_67
|
| 1717 |
+
2003/01/14/big/img_325
|
| 1718 |
+
2002/07/29/big/img_1357
|
| 1719 |
+
2002/07/19/big/img_391
|
| 1720 |
+
2002/07/30/big/img_307
|
| 1721 |
+
2003/01/13/big/img_219
|
| 1722 |
+
2002/07/24/big/img_807
|
| 1723 |
+
2002/08/23/big/img_543
|
| 1724 |
+
2002/08/29/big/img_18620
|
| 1725 |
+
2002/07/22/big/img_769
|
| 1726 |
+
2002/08/26/big/img_503
|
| 1727 |
+
2002/07/30/big/img_78
|
| 1728 |
+
2002/08/14/big/img_1036
|
| 1729 |
+
2002/08/09/big/img_58
|
| 1730 |
+
2002/07/24/big/img_616
|
| 1731 |
+
2002/08/02/big/img_464
|
| 1732 |
+
2002/07/26/big/img_576
|
| 1733 |
+
2002/07/22/big/img_273
|
| 1734 |
+
2003/01/16/big/img_470
|
| 1735 |
+
2002/07/29/big/img_329
|
| 1736 |
+
2002/07/30/big/img_1086
|
| 1737 |
+
2002/07/31/big/img_353
|
| 1738 |
+
2002/09/02/big/img_15275
|
| 1739 |
+
2003/01/17/big/img_555
|
| 1740 |
+
2002/08/26/big/img_212
|
| 1741 |
+
2002/08/01/big/img_1692
|
| 1742 |
+
2003/01/15/big/img_600
|
| 1743 |
+
2002/07/29/big/img_825
|
| 1744 |
+
2002/08/08/big/img_68
|
| 1745 |
+
2002/08/10/big/img_719
|
| 1746 |
+
2002/07/31/big/img_636
|
| 1747 |
+
2002/07/29/big/img_325
|
| 1748 |
+
2002/07/21/big/img_515
|
| 1749 |
+
2002/07/22/big/img_705
|
| 1750 |
+
2003/01/13/big/img_818
|
| 1751 |
+
2002/08/09/big/img_486
|
| 1752 |
+
2002/08/22/big/img_141
|
| 1753 |
+
2002/07/22/big/img_303
|
| 1754 |
+
2002/08/09/big/img_393
|
| 1755 |
+
2002/07/29/big/img_963
|
| 1756 |
+
2002/08/02/big/img_1215
|
| 1757 |
+
2002/08/19/big/img_674
|
| 1758 |
+
2002/08/12/big/img_690
|
| 1759 |
+
2002/08/21/big/img_637
|
| 1760 |
+
2002/08/21/big/img_841
|
| 1761 |
+
2002/08/24/big/img_71
|
| 1762 |
+
2002/07/25/big/img_596
|
| 1763 |
+
2002/07/24/big/img_864
|
| 1764 |
+
2002/08/18/big/img_293
|
| 1765 |
+
2003/01/14/big/img_657
|
| 1766 |
+
2002/08/15/big/img_411
|
| 1767 |
+
2002/08/16/big/img_348
|
| 1768 |
+
2002/08/05/big/img_3157
|
| 1769 |
+
2002/07/20/big/img_663
|
| 1770 |
+
2003/01/13/big/img_654
|
| 1771 |
+
2003/01/16/big/img_433
|
| 1772 |
+
2002/08/30/big/img_18200
|
| 1773 |
+
2002/08/12/big/img_226
|
| 1774 |
+
2003/01/16/big/img_491
|
| 1775 |
+
2002/08/08/big/img_666
|
| 1776 |
+
2002/07/19/big/img_576
|
| 1777 |
+
2003/01/15/big/img_776
|
| 1778 |
+
2003/01/16/big/img_899
|
| 1779 |
+
2002/07/19/big/img_397
|
| 1780 |
+
2002/08/14/big/img_44
|
| 1781 |
+
2003/01/15/big/img_762
|
| 1782 |
+
2002/08/02/big/img_982
|
| 1783 |
+
2002/09/02/big/img_15234
|
| 1784 |
+
2002/08/17/big/img_556
|
| 1785 |
+
2002/08/21/big/img_410
|
| 1786 |
+
2002/08/21/big/img_386
|
| 1787 |
+
2002/07/19/big/img_690
|
| 1788 |
+
2002/08/05/big/img_3052
|
| 1789 |
+
2002/08/14/big/img_219
|
| 1790 |
+
2002/08/16/big/img_273
|
| 1791 |
+
2003/01/15/big/img_752
|
| 1792 |
+
2002/08/08/big/img_184
|
| 1793 |
+
2002/07/31/big/img_743
|
| 1794 |
+
2002/08/23/big/img_338
|
| 1795 |
+
2003/01/14/big/img_1055
|
| 1796 |
+
2002/08/05/big/img_3405
|
| 1797 |
+
2003/01/15/big/img_17
|
| 1798 |
+
2002/08/03/big/img_141
|
| 1799 |
+
2002/08/14/big/img_549
|
| 1800 |
+
2002/07/27/big/img_1034
|
| 1801 |
+
2002/07/31/big/img_932
|
| 1802 |
+
2002/08/30/big/img_18487
|
| 1803 |
+
2002/09/02/big/img_15814
|
| 1804 |
+
2002/08/01/big/img_2086
|
| 1805 |
+
2002/09/01/big/img_16535
|
| 1806 |
+
2002/07/22/big/img_500
|
| 1807 |
+
2003/01/13/big/img_400
|
| 1808 |
+
2002/08/25/big/img_607
|
| 1809 |
+
2002/08/30/big/img_18384
|
| 1810 |
+
2003/01/14/big/img_951
|
| 1811 |
+
2002/08/13/big/img_1150
|
| 1812 |
+
2002/08/08/big/img_1022
|
| 1813 |
+
2002/08/10/big/img_428
|
| 1814 |
+
2002/08/28/big/img_19242
|
| 1815 |
+
2002/08/05/big/img_3098
|
| 1816 |
+
2002/07/23/big/img_400
|
| 1817 |
+
2002/08/26/big/img_365
|
| 1818 |
+
2002/07/20/big/img_318
|
| 1819 |
+
2002/08/13/big/img_740
|
| 1820 |
+
2003/01/16/big/img_37
|
| 1821 |
+
2002/08/26/big/img_274
|
| 1822 |
+
2002/08/02/big/img_205
|
| 1823 |
+
2002/08/21/big/img_695
|
| 1824 |
+
2002/08/06/big/img_2289
|
| 1825 |
+
2002/08/20/big/img_794
|
| 1826 |
+
2002/08/18/big/img_438
|
| 1827 |
+
2002/08/07/big/img_1380
|
| 1828 |
+
2002/08/02/big/img_737
|
| 1829 |
+
2002/08/07/big/img_1651
|
| 1830 |
+
2002/08/15/big/img_1238
|
| 1831 |
+
2002/08/01/big/img_1681
|
| 1832 |
+
2002/08/06/big/img_3017
|
| 1833 |
+
2002/07/23/big/img_706
|
| 1834 |
+
2002/07/31/big/img_392
|
| 1835 |
+
2002/08/09/big/img_539
|
| 1836 |
+
2002/07/29/big/img_835
|
| 1837 |
+
2002/08/26/big/img_723
|
| 1838 |
+
2002/08/28/big/img_19235
|
| 1839 |
+
2003/01/16/big/img_353
|
| 1840 |
+
2002/08/10/big/img_150
|
| 1841 |
+
2002/08/29/big/img_19025
|
| 1842 |
+
2002/08/21/big/img_310
|
| 1843 |
+
2002/08/10/big/img_823
|
| 1844 |
+
2002/07/26/big/img_981
|
| 1845 |
+
2002/08/11/big/img_288
|
| 1846 |
+
2002/08/19/big/img_534
|
| 1847 |
+
2002/08/21/big/img_300
|
| 1848 |
+
2002/07/31/big/img_49
|
| 1849 |
+
2002/07/30/big/img_469
|
| 1850 |
+
2002/08/28/big/img_19197
|
| 1851 |
+
2002/08/25/big/img_205
|
| 1852 |
+
2002/08/10/big/img_390
|
| 1853 |
+
2002/08/23/big/img_291
|
| 1854 |
+
2002/08/26/big/img_230
|
| 1855 |
+
2002/08/18/big/img_76
|
| 1856 |
+
2002/07/23/big/img_409
|
| 1857 |
+
2002/08/14/big/img_1053
|
| 1858 |
+
2003/01/14/big/img_291
|
| 1859 |
+
2002/08/10/big/img_503
|
| 1860 |
+
2002/08/27/big/img_19928
|
| 1861 |
+
2002/08/03/big/img_563
|
| 1862 |
+
2002/08/17/big/img_250
|
| 1863 |
+
2002/08/06/big/img_2381
|
| 1864 |
+
2002/08/17/big/img_948
|
| 1865 |
+
2002/08/06/big/img_2710
|
| 1866 |
+
2002/07/22/big/img_696
|
| 1867 |
+
2002/07/31/big/img_670
|
| 1868 |
+
2002/08/12/big/img_594
|
| 1869 |
+
2002/07/29/big/img_624
|
| 1870 |
+
2003/01/17/big/img_934
|
| 1871 |
+
2002/08/03/big/img_584
|
| 1872 |
+
2002/08/22/big/img_1003
|
| 1873 |
+
2002/08/05/big/img_3396
|
| 1874 |
+
2003/01/13/big/img_570
|
| 1875 |
+
2002/08/02/big/img_219
|
| 1876 |
+
2002/09/02/big/img_15774
|
| 1877 |
+
2002/08/16/big/img_818
|
| 1878 |
+
2002/08/23/big/img_402
|
| 1879 |
+
2003/01/14/big/img_552
|
| 1880 |
+
2002/07/29/big/img_71
|
| 1881 |
+
2002/08/05/big/img_3592
|
| 1882 |
+
2002/08/16/big/img_80
|
| 1883 |
+
2002/07/27/big/img_672
|
| 1884 |
+
2003/01/13/big/img_470
|
| 1885 |
+
2003/01/16/big/img_702
|
| 1886 |
+
2002/09/01/big/img_16130
|
| 1887 |
+
2002/08/08/big/img_240
|
| 1888 |
+
2002/09/01/big/img_16338
|
| 1889 |
+
2002/07/26/big/img_312
|
| 1890 |
+
2003/01/14/big/img_538
|
| 1891 |
+
2002/07/20/big/img_695
|
| 1892 |
+
2002/08/30/big/img_18098
|
| 1893 |
+
2002/08/25/big/img_259
|
| 1894 |
+
2002/08/16/big/img_1042
|
| 1895 |
+
2002/08/09/big/img_837
|
| 1896 |
+
2002/08/31/big/img_17760
|
| 1897 |
+
2002/07/31/big/img_14
|
| 1898 |
+
2002/08/09/big/img_361
|
| 1899 |
+
2003/01/16/big/img_107
|
| 1900 |
+
2002/08/14/big/img_124
|
| 1901 |
+
2002/07/19/big/img_463
|
| 1902 |
+
2003/01/15/big/img_275
|
| 1903 |
+
2002/07/25/big/img_1151
|
| 1904 |
+
2002/07/29/big/img_1501
|
| 1905 |
+
2002/08/27/big/img_19889
|
| 1906 |
+
2002/08/29/big/img_18603
|
| 1907 |
+
2003/01/17/big/img_601
|
| 1908 |
+
2002/08/25/big/img_355
|
| 1909 |
+
2002/08/08/big/img_297
|
| 1910 |
+
2002/08/20/big/img_290
|
| 1911 |
+
2002/07/31/big/img_195
|
| 1912 |
+
2003/01/01/big/img_336
|
| 1913 |
+
2002/08/18/big/img_369
|
| 1914 |
+
2002/07/25/big/img_621
|
| 1915 |
+
2002/08/11/big/img_508
|
| 1916 |
+
2003/01/14/big/img_458
|
| 1917 |
+
2003/01/15/big/img_795
|
| 1918 |
+
2002/08/12/big/img_498
|
| 1919 |
+
2002/08/01/big/img_1734
|
| 1920 |
+
2002/08/02/big/img_246
|
| 1921 |
+
2002/08/16/big/img_565
|
| 1922 |
+
2002/08/11/big/img_475
|
| 1923 |
+
2002/08/22/big/img_408
|
| 1924 |
+
2002/07/28/big/img_78
|
| 1925 |
+
2002/07/21/big/img_81
|
| 1926 |
+
2003/01/14/big/img_697
|
| 1927 |
+
2002/08/14/big/img_661
|
| 1928 |
+
2002/08/15/big/img_507
|
| 1929 |
+
2002/08/19/big/img_55
|
| 1930 |
+
2002/07/22/big/img_152
|
| 1931 |
+
2003/01/14/big/img_470
|
| 1932 |
+
2002/08/03/big/img_379
|
| 1933 |
+
2002/08/22/big/img_506
|
| 1934 |
+
2003/01/16/big/img_966
|
| 1935 |
+
2002/08/18/big/img_698
|
| 1936 |
+
2002/08/24/big/img_528
|
| 1937 |
+
2002/08/23/big/img_10
|
| 1938 |
+
2002/08/01/big/img_1655
|
| 1939 |
+
2002/08/22/big/img_953
|
| 1940 |
+
2002/07/19/big/img_630
|
| 1941 |
+
2002/07/22/big/img_889
|
| 1942 |
+
2002/08/16/big/img_351
|
| 1943 |
+
2003/01/16/big/img_83
|
| 1944 |
+
2002/07/19/big/img_805
|
| 1945 |
+
2002/08/14/big/img_704
|
| 1946 |
+
2002/07/19/big/img_389
|
| 1947 |
+
2002/08/31/big/img_17765
|
| 1948 |
+
2002/07/29/big/img_606
|
| 1949 |
+
2003/01/17/big/img_939
|
| 1950 |
+
2002/09/02/big/img_15081
|
| 1951 |
+
2002/08/21/big/img_181
|
| 1952 |
+
2002/07/29/big/img_1321
|
| 1953 |
+
2002/07/21/big/img_497
|
| 1954 |
+
2002/07/20/big/img_539
|
| 1955 |
+
2002/08/24/big/img_119
|
| 1956 |
+
2002/08/01/big/img_1281
|
| 1957 |
+
2002/07/26/big/img_207
|
| 1958 |
+
2002/07/26/big/img_432
|
| 1959 |
+
2002/07/27/big/img_1006
|
| 1960 |
+
2002/08/05/big/img_3087
|
| 1961 |
+
2002/08/14/big/img_252
|
| 1962 |
+
2002/08/14/big/img_798
|
| 1963 |
+
2002/07/24/big/img_538
|
| 1964 |
+
2002/09/02/big/img_15507
|
| 1965 |
+
2002/08/08/big/img_901
|
| 1966 |
+
2003/01/14/big/img_557
|
| 1967 |
+
2002/08/07/big/img_1819
|
| 1968 |
+
2002/08/04/big/img_470
|
| 1969 |
+
2002/08/01/big/img_1504
|
| 1970 |
+
2002/08/16/big/img_1070
|
| 1971 |
+
2002/08/16/big/img_372
|
| 1972 |
+
2002/08/23/big/img_416
|
| 1973 |
+
2002/08/30/big/img_18208
|
| 1974 |
+
2002/08/01/big/img_2043
|
| 1975 |
+
2002/07/22/big/img_385
|
| 1976 |
+
2002/08/22/big/img_466
|
| 1977 |
+
2002/08/21/big/img_869
|
| 1978 |
+
2002/08/28/big/img_19429
|
| 1979 |
+
2002/08/02/big/img_770
|
| 1980 |
+
2002/07/23/big/img_433
|
| 1981 |
+
2003/01/14/big/img_13
|
| 1982 |
+
2002/07/27/big/img_953
|
| 1983 |
+
2002/09/02/big/img_15728
|
| 1984 |
+
2002/08/01/big/img_1361
|
| 1985 |
+
2002/08/29/big/img_18897
|
| 1986 |
+
2002/08/26/big/img_534
|
| 1987 |
+
2002/08/11/big/img_121
|
| 1988 |
+
2002/08/26/big/img_20130
|
| 1989 |
+
2002/07/31/big/img_363
|
| 1990 |
+
2002/08/13/big/img_978
|
| 1991 |
+
2002/07/25/big/img_835
|
| 1992 |
+
2002/08/02/big/img_906
|
| 1993 |
+
2003/01/14/big/img_548
|
| 1994 |
+
2002/07/30/big/img_80
|
| 1995 |
+
2002/07/26/big/img_982
|
| 1996 |
+
2003/01/16/big/img_99
|
| 1997 |
+
2002/08/19/big/img_362
|
| 1998 |
+
2002/08/24/big/img_376
|
| 1999 |
+
2002/08/07/big/img_1264
|
| 2000 |
+
2002/07/27/big/img_938
|
| 2001 |
+
2003/01/17/big/img_535
|
| 2002 |
+
2002/07/26/big/img_457
|
| 2003 |
+
2002/08/08/big/img_848
|
| 2004 |
+
2003/01/15/big/img_859
|
| 2005 |
+
2003/01/15/big/img_622
|
| 2006 |
+
2002/07/30/big/img_403
|
| 2007 |
+
2002/07/29/big/img_217
|
| 2008 |
+
2002/07/26/big/img_891
|
| 2009 |
+
2002/07/24/big/img_70
|
| 2010 |
+
2002/08/25/big/img_619
|
| 2011 |
+
2002/08/05/big/img_3375
|
| 2012 |
+
2002/08/01/big/img_2160
|
| 2013 |
+
2002/08/06/big/img_2227
|
| 2014 |
+
2003/01/14/big/img_117
|
| 2015 |
+
2002/08/14/big/img_227
|
| 2016 |
+
2002/08/13/big/img_565
|
| 2017 |
+
2002/08/19/big/img_625
|
| 2018 |
+
2002/08/03/big/img_812
|
| 2019 |
+
2002/07/24/big/img_41
|
| 2020 |
+
2002/08/16/big/img_235
|
| 2021 |
+
2002/07/29/big/img_759
|
| 2022 |
+
2002/07/21/big/img_433
|
| 2023 |
+
2002/07/29/big/img_190
|
| 2024 |
+
2003/01/16/big/img_435
|
| 2025 |
+
2003/01/13/big/img_708
|
| 2026 |
+
2002/07/30/big/img_57
|
| 2027 |
+
2002/08/22/big/img_162
|
| 2028 |
+
2003/01/01/big/img_558
|
| 2029 |
+
2003/01/15/big/img_604
|
| 2030 |
+
2002/08/16/big/img_935
|
| 2031 |
+
2002/08/20/big/img_394
|
| 2032 |
+
2002/07/28/big/img_465
|
| 2033 |
+
2002/09/02/big/img_15534
|
| 2034 |
+
2002/08/16/big/img_87
|
| 2035 |
+
2002/07/22/big/img_469
|
| 2036 |
+
2002/08/12/big/img_245
|
| 2037 |
+
2003/01/13/big/img_236
|
| 2038 |
+
2002/08/06/big/img_2736
|
| 2039 |
+
2002/08/03/big/img_348
|
| 2040 |
+
2003/01/14/big/img_218
|
| 2041 |
+
2002/07/26/big/img_232
|
| 2042 |
+
2003/01/15/big/img_244
|
| 2043 |
+
2002/07/25/big/img_1121
|
| 2044 |
+
2002/08/01/big/img_1484
|
| 2045 |
+
2002/07/26/big/img_541
|
| 2046 |
+
2002/08/07/big/img_1244
|
| 2047 |
+
2002/07/31/big/img_3
|
| 2048 |
+
2002/08/30/big/img_18437
|
| 2049 |
+
2002/08/29/big/img_19094
|
| 2050 |
+
2002/08/01/big/img_1355
|
| 2051 |
+
2002/08/19/big/img_338
|
| 2052 |
+
2002/07/19/big/img_255
|
| 2053 |
+
2002/07/21/big/img_76
|
| 2054 |
+
2002/08/25/big/img_199
|
| 2055 |
+
2002/08/12/big/img_740
|
| 2056 |
+
2002/07/30/big/img_852
|
| 2057 |
+
2002/08/15/big/img_599
|
| 2058 |
+
2002/08/23/big/img_254
|
| 2059 |
+
2002/08/19/big/img_125
|
| 2060 |
+
2002/07/24/big/img_2
|
| 2061 |
+
2002/08/04/big/img_145
|
| 2062 |
+
2002/08/05/big/img_3137
|
| 2063 |
+
2002/07/28/big/img_463
|
| 2064 |
+
2003/01/14/big/img_801
|
| 2065 |
+
2002/07/23/big/img_366
|
| 2066 |
+
2002/08/26/big/img_600
|
| 2067 |
+
2002/08/26/big/img_649
|
| 2068 |
+
2002/09/02/big/img_15849
|
| 2069 |
+
2002/07/26/big/img_248
|
| 2070 |
+
2003/01/13/big/img_200
|
| 2071 |
+
2002/08/07/big/img_1794
|
| 2072 |
+
2002/08/31/big/img_17270
|
| 2073 |
+
2002/08/23/big/img_608
|
| 2074 |
+
2003/01/13/big/img_837
|
| 2075 |
+
2002/08/23/big/img_581
|
| 2076 |
+
2002/08/20/big/img_754
|
| 2077 |
+
2002/08/18/big/img_183
|
| 2078 |
+
2002/08/20/big/img_328
|
| 2079 |
+
2002/07/22/big/img_494
|
| 2080 |
+
2002/07/29/big/img_399
|
| 2081 |
+
2002/08/28/big/img_19284
|
| 2082 |
+
2002/08/08/big/img_566
|
| 2083 |
+
2002/07/25/big/img_376
|
| 2084 |
+
2002/07/23/big/img_138
|
| 2085 |
+
2002/07/25/big/img_435
|
| 2086 |
+
2002/08/17/big/img_685
|
| 2087 |
+
2002/07/19/big/img_90
|
| 2088 |
+
2002/07/20/big/img_716
|
| 2089 |
+
2002/08/31/big/img_17458
|
| 2090 |
+
2002/08/26/big/img_461
|
| 2091 |
+
2002/07/25/big/img_355
|
| 2092 |
+
2002/08/06/big/img_2152
|
| 2093 |
+
2002/07/27/big/img_932
|
| 2094 |
+
2002/07/23/big/img_232
|
| 2095 |
+
2002/08/08/big/img_1020
|
| 2096 |
+
2002/07/31/big/img_366
|
| 2097 |
+
2002/08/06/big/img_2667
|
| 2098 |
+
2002/08/21/big/img_465
|
| 2099 |
+
2002/08/15/big/img_305
|
| 2100 |
+
2002/08/02/big/img_247
|
| 2101 |
+
2002/07/28/big/img_46
|
| 2102 |
+
2002/08/27/big/img_19922
|
| 2103 |
+
2002/08/23/big/img_643
|
| 2104 |
+
2003/01/13/big/img_624
|
| 2105 |
+
2002/08/23/big/img_625
|
| 2106 |
+
2002/08/05/big/img_3787
|
| 2107 |
+
2003/01/13/big/img_627
|
| 2108 |
+
2002/09/01/big/img_16381
|
| 2109 |
+
2002/08/05/big/img_3668
|
| 2110 |
+
2002/07/21/big/img_535
|
| 2111 |
+
2002/08/27/big/img_19680
|
| 2112 |
+
2002/07/22/big/img_413
|
| 2113 |
+
2002/07/29/big/img_481
|
| 2114 |
+
2003/01/15/big/img_496
|
| 2115 |
+
2002/07/23/big/img_701
|
| 2116 |
+
2002/08/29/big/img_18670
|
| 2117 |
+
2002/07/28/big/img_319
|
| 2118 |
+
2003/01/14/big/img_517
|
| 2119 |
+
2002/07/26/big/img_256
|
| 2120 |
+
2003/01/16/big/img_593
|
| 2121 |
+
2002/07/30/big/img_956
|
| 2122 |
+
2002/07/30/big/img_667
|
| 2123 |
+
2002/07/25/big/img_100
|
| 2124 |
+
2002/08/11/big/img_570
|
| 2125 |
+
2002/07/26/big/img_745
|
| 2126 |
+
2002/08/04/big/img_834
|
| 2127 |
+
2002/08/25/big/img_521
|
| 2128 |
+
2002/08/01/big/img_2148
|
| 2129 |
+
2002/09/02/big/img_15183
|
| 2130 |
+
2002/08/22/big/img_514
|
| 2131 |
+
2002/08/23/big/img_477
|
| 2132 |
+
2002/07/23/big/img_336
|
| 2133 |
+
2002/07/26/big/img_481
|
| 2134 |
+
2002/08/20/big/img_409
|
| 2135 |
+
2002/07/23/big/img_918
|
| 2136 |
+
2002/08/09/big/img_474
|
| 2137 |
+
2002/08/02/big/img_929
|
| 2138 |
+
2002/08/31/big/img_17932
|
| 2139 |
+
2002/08/19/big/img_161
|
| 2140 |
+
2002/08/09/big/img_667
|
| 2141 |
+
2002/07/31/big/img_805
|
| 2142 |
+
2002/09/02/big/img_15678
|
| 2143 |
+
2002/08/31/big/img_17509
|
| 2144 |
+
2002/08/29/big/img_18998
|
| 2145 |
+
2002/07/23/big/img_301
|
| 2146 |
+
2002/08/07/big/img_1612
|
| 2147 |
+
2002/08/06/big/img_2472
|
| 2148 |
+
2002/07/23/big/img_466
|
| 2149 |
+
2002/08/27/big/img_19634
|
| 2150 |
+
2003/01/16/big/img_16
|
| 2151 |
+
2002/08/14/big/img_193
|
| 2152 |
+
2002/08/21/big/img_340
|
| 2153 |
+
2002/08/27/big/img_19799
|
| 2154 |
+
2002/08/01/big/img_1345
|
| 2155 |
+
2002/08/07/big/img_1448
|
| 2156 |
+
2002/08/11/big/img_324
|
| 2157 |
+
2003/01/16/big/img_754
|
| 2158 |
+
2002/08/13/big/img_418
|
| 2159 |
+
2003/01/16/big/img_544
|
| 2160 |
+
2002/08/19/big/img_135
|
| 2161 |
+
2002/08/10/big/img_455
|
| 2162 |
+
2002/08/10/big/img_693
|
| 2163 |
+
2002/08/31/big/img_17967
|
| 2164 |
+
2002/08/28/big/img_19229
|
| 2165 |
+
2002/08/04/big/img_811
|
| 2166 |
+
2002/09/01/big/img_16225
|
| 2167 |
+
2003/01/16/big/img_428
|
| 2168 |
+
2002/09/02/big/img_15295
|
| 2169 |
+
2002/07/26/big/img_108
|
| 2170 |
+
2002/07/21/big/img_477
|
| 2171 |
+
2002/08/07/big/img_1354
|
| 2172 |
+
2002/08/23/big/img_246
|
| 2173 |
+
2002/08/16/big/img_652
|
| 2174 |
+
2002/07/27/big/img_553
|
| 2175 |
+
2002/07/31/big/img_346
|
| 2176 |
+
2002/08/04/big/img_537
|
| 2177 |
+
2002/08/08/big/img_498
|
| 2178 |
+
2002/08/29/big/img_18956
|
| 2179 |
+
2003/01/13/big/img_922
|
| 2180 |
+
2002/08/31/big/img_17425
|
| 2181 |
+
2002/07/26/big/img_438
|
| 2182 |
+
2002/08/19/big/img_185
|
| 2183 |
+
2003/01/16/big/img_33
|
| 2184 |
+
2002/08/10/big/img_252
|
| 2185 |
+
2002/07/29/big/img_598
|
| 2186 |
+
2002/08/27/big/img_19820
|
| 2187 |
+
2002/08/06/big/img_2664
|
| 2188 |
+
2002/08/20/big/img_705
|
| 2189 |
+
2003/01/14/big/img_816
|
| 2190 |
+
2002/08/03/big/img_552
|
| 2191 |
+
2002/07/25/big/img_561
|
| 2192 |
+
2002/07/25/big/img_934
|
| 2193 |
+
2002/08/01/big/img_1893
|
| 2194 |
+
2003/01/14/big/img_746
|
| 2195 |
+
2003/01/16/big/img_519
|
| 2196 |
+
2002/08/03/big/img_681
|
| 2197 |
+
2002/07/24/big/img_808
|
| 2198 |
+
2002/08/14/big/img_803
|
| 2199 |
+
2002/08/25/big/img_155
|
| 2200 |
+
2002/07/30/big/img_1107
|
| 2201 |
+
2002/08/29/big/img_18882
|
| 2202 |
+
2003/01/15/big/img_598
|
| 2203 |
+
2002/08/19/big/img_122
|
| 2204 |
+
2002/07/30/big/img_428
|
| 2205 |
+
2002/07/24/big/img_684
|
| 2206 |
+
2002/08/22/big/img_192
|
| 2207 |
+
2002/08/22/big/img_543
|
| 2208 |
+
2002/08/07/big/img_1318
|
| 2209 |
+
2002/08/18/big/img_25
|
| 2210 |
+
2002/07/26/big/img_583
|
| 2211 |
+
2002/07/20/big/img_464
|
| 2212 |
+
2002/08/19/big/img_664
|
| 2213 |
+
2002/08/24/big/img_861
|
| 2214 |
+
2002/09/01/big/img_16136
|
| 2215 |
+
2002/08/22/big/img_400
|
| 2216 |
+
2002/08/12/big/img_445
|
| 2217 |
+
2003/01/14/big/img_174
|
| 2218 |
+
2002/08/27/big/img_19677
|
| 2219 |
+
2002/08/31/big/img_17214
|
| 2220 |
+
2002/08/30/big/img_18175
|
| 2221 |
+
2003/01/17/big/img_402
|
| 2222 |
+
2002/08/06/big/img_2396
|
| 2223 |
+
2002/08/18/big/img_448
|
| 2224 |
+
2002/08/21/big/img_165
|
| 2225 |
+
2002/08/31/big/img_17609
|
| 2226 |
+
2003/01/01/big/img_151
|
| 2227 |
+
2002/08/26/big/img_372
|
| 2228 |
+
2002/09/02/big/img_15994
|
| 2229 |
+
2002/07/26/big/img_660
|
| 2230 |
+
2002/09/02/big/img_15197
|
| 2231 |
+
2002/07/29/big/img_258
|
| 2232 |
+
2002/08/30/big/img_18525
|
| 2233 |
+
2003/01/13/big/img_368
|
| 2234 |
+
2002/07/29/big/img_1538
|
| 2235 |
+
2002/07/21/big/img_787
|
| 2236 |
+
2002/08/18/big/img_152
|
| 2237 |
+
2002/08/06/big/img_2379
|
| 2238 |
+
2003/01/17/big/img_864
|
| 2239 |
+
2002/08/27/big/img_19998
|
| 2240 |
+
2002/08/01/big/img_1634
|
| 2241 |
+
2002/07/25/big/img_414
|
| 2242 |
+
2002/08/22/big/img_627
|
| 2243 |
+
2002/08/07/big/img_1669
|
| 2244 |
+
2002/08/16/big/img_1052
|
| 2245 |
+
2002/08/31/big/img_17796
|
| 2246 |
+
2002/08/18/big/img_199
|
| 2247 |
+
2002/09/02/big/img_15147
|
| 2248 |
+
2002/08/09/big/img_460
|
| 2249 |
+
2002/08/14/big/img_581
|
| 2250 |
+
2002/08/30/big/img_18286
|
| 2251 |
+
2002/07/26/big/img_337
|
| 2252 |
+
2002/08/18/big/img_589
|
| 2253 |
+
2003/01/14/big/img_866
|
| 2254 |
+
2002/07/20/big/img_624
|
| 2255 |
+
2002/08/01/big/img_1801
|
| 2256 |
+
2002/07/24/big/img_683
|
| 2257 |
+
2002/08/09/big/img_725
|
| 2258 |
+
2003/01/14/big/img_34
|
| 2259 |
+
2002/07/30/big/img_144
|
| 2260 |
+
2002/07/30/big/img_706
|
| 2261 |
+
2002/08/08/big/img_394
|
| 2262 |
+
2002/08/19/big/img_619
|
| 2263 |
+
2002/08/06/big/img_2703
|
| 2264 |
+
2002/08/29/big/img_19034
|
| 2265 |
+
2002/07/24/big/img_67
|
| 2266 |
+
2002/08/27/big/img_19841
|
| 2267 |
+
2002/08/19/big/img_427
|
| 2268 |
+
2003/01/14/big/img_333
|
| 2269 |
+
2002/09/01/big/img_16406
|
| 2270 |
+
2002/07/19/big/img_882
|
| 2271 |
+
2002/08/17/big/img_238
|
| 2272 |
+
2003/01/14/big/img_739
|
| 2273 |
+
2002/07/22/big/img_151
|
| 2274 |
+
2002/08/21/big/img_743
|
| 2275 |
+
2002/07/25/big/img_1048
|
| 2276 |
+
2002/07/30/big/img_395
|
| 2277 |
+
2003/01/13/big/img_584
|
| 2278 |
+
2002/08/13/big/img_742
|
| 2279 |
+
2002/08/13/big/img_1168
|
| 2280 |
+
2003/01/14/big/img_147
|
| 2281 |
+
2002/07/26/big/img_803
|
| 2282 |
+
2002/08/05/big/img_3298
|
| 2283 |
+
2002/08/07/big/img_1451
|
| 2284 |
+
2002/08/16/big/img_424
|
| 2285 |
+
2002/07/29/big/img_1069
|
| 2286 |
+
2002/09/01/big/img_16735
|
| 2287 |
+
2002/07/21/big/img_637
|
| 2288 |
+
2003/01/14/big/img_585
|
| 2289 |
+
2002/08/02/big/img_358
|
| 2290 |
+
2003/01/13/big/img_358
|
| 2291 |
+
2002/08/14/big/img_198
|
| 2292 |
+
2002/08/17/big/img_935
|
| 2293 |
+
2002/08/04/big/img_42
|
| 2294 |
+
2002/08/30/big/img_18245
|
| 2295 |
+
2002/07/25/big/img_158
|
| 2296 |
+
2002/08/22/big/img_744
|
| 2297 |
+
2002/08/06/big/img_2291
|
| 2298 |
+
2002/08/05/big/img_3044
|
| 2299 |
+
2002/07/30/big/img_272
|
| 2300 |
+
2002/08/23/big/img_641
|
| 2301 |
+
2002/07/24/big/img_797
|
| 2302 |
+
2002/07/30/big/img_392
|
| 2303 |
+
2003/01/14/big/img_447
|
| 2304 |
+
2002/07/31/big/img_898
|
| 2305 |
+
2002/08/06/big/img_2812
|
| 2306 |
+
2002/08/13/big/img_564
|
| 2307 |
+
2002/07/22/big/img_43
|
| 2308 |
+
2002/07/26/big/img_634
|
| 2309 |
+
2002/07/19/big/img_843
|
| 2310 |
+
2002/08/26/big/img_58
|
| 2311 |
+
2002/07/21/big/img_375
|
| 2312 |
+
2002/08/25/big/img_729
|
| 2313 |
+
2002/07/19/big/img_561
|
| 2314 |
+
2003/01/15/big/img_884
|
| 2315 |
+
2002/07/25/big/img_891
|
| 2316 |
+
2002/08/09/big/img_558
|
| 2317 |
+
2002/08/26/big/img_587
|
| 2318 |
+
2002/08/13/big/img_1146
|
| 2319 |
+
2002/09/02/big/img_15153
|
| 2320 |
+
2002/07/26/big/img_316
|
| 2321 |
+
2002/08/01/big/img_1940
|
| 2322 |
+
2002/08/26/big/img_90
|
| 2323 |
+
2003/01/13/big/img_347
|
| 2324 |
+
2002/07/25/big/img_520
|
| 2325 |
+
2002/08/29/big/img_18718
|
| 2326 |
+
2002/08/28/big/img_19219
|
| 2327 |
+
2002/08/13/big/img_375
|
| 2328 |
+
2002/07/20/big/img_719
|
| 2329 |
+
2002/08/31/big/img_17431
|
| 2330 |
+
2002/07/28/big/img_192
|
| 2331 |
+
2002/08/26/big/img_259
|
| 2332 |
+
2002/08/18/big/img_484
|
| 2333 |
+
2002/07/29/big/img_580
|
| 2334 |
+
2002/07/26/big/img_84
|
| 2335 |
+
2002/08/02/big/img_302
|
| 2336 |
+
2002/08/31/big/img_17007
|
| 2337 |
+
2003/01/15/big/img_543
|
| 2338 |
+
2002/09/01/big/img_16488
|
| 2339 |
+
2002/08/22/big/img_798
|
| 2340 |
+
2002/07/30/big/img_383
|
| 2341 |
+
2002/08/04/big/img_668
|
| 2342 |
+
2002/08/13/big/img_156
|
| 2343 |
+
2002/08/07/big/img_1353
|
| 2344 |
+
2002/07/25/big/img_281
|
| 2345 |
+
2003/01/14/big/img_587
|
| 2346 |
+
2003/01/15/big/img_524
|
| 2347 |
+
2002/08/19/big/img_726
|
| 2348 |
+
2002/08/21/big/img_709
|
| 2349 |
+
2002/08/26/big/img_465
|
| 2350 |
+
2002/07/31/big/img_658
|
| 2351 |
+
2002/08/28/big/img_19148
|
| 2352 |
+
2002/07/23/big/img_423
|
| 2353 |
+
2002/08/16/big/img_758
|
| 2354 |
+
2002/08/22/big/img_523
|
| 2355 |
+
2002/08/16/big/img_591
|
| 2356 |
+
2002/08/23/big/img_845
|
| 2357 |
+
2002/07/26/big/img_678
|
| 2358 |
+
2002/08/09/big/img_806
|
| 2359 |
+
2002/08/06/big/img_2369
|
| 2360 |
+
2002/07/29/big/img_457
|
| 2361 |
+
2002/07/19/big/img_278
|
| 2362 |
+
2002/08/30/big/img_18107
|
| 2363 |
+
2002/07/26/big/img_444
|
| 2364 |
+
2002/08/20/big/img_278
|
| 2365 |
+
2002/08/26/big/img_92
|
| 2366 |
+
2002/08/26/big/img_257
|
| 2367 |
+
2002/07/25/big/img_266
|
| 2368 |
+
2002/08/05/big/img_3829
|
| 2369 |
+
2002/07/26/big/img_757
|
| 2370 |
+
2002/07/29/big/img_1536
|
| 2371 |
+
2002/08/09/big/img_472
|
| 2372 |
+
2003/01/17/big/img_480
|
| 2373 |
+
2002/08/28/big/img_19355
|
| 2374 |
+
2002/07/26/big/img_97
|
| 2375 |
+
2002/08/06/big/img_2503
|
| 2376 |
+
2002/07/19/big/img_254
|
| 2377 |
+
2002/08/01/big/img_1470
|
| 2378 |
+
2002/08/21/big/img_42
|
| 2379 |
+
2002/08/20/big/img_217
|
| 2380 |
+
2002/08/06/big/img_2459
|
| 2381 |
+
2002/07/19/big/img_552
|
| 2382 |
+
2002/08/13/big/img_717
|
| 2383 |
+
2002/08/12/big/img_586
|
| 2384 |
+
2002/08/20/big/img_411
|
| 2385 |
+
2003/01/13/big/img_768
|
| 2386 |
+
2002/08/07/big/img_1747
|
| 2387 |
+
2002/08/15/big/img_385
|
| 2388 |
+
2002/08/01/big/img_1648
|
| 2389 |
+
2002/08/15/big/img_311
|
| 2390 |
+
2002/08/21/big/img_95
|
| 2391 |
+
2002/08/09/big/img_108
|
| 2392 |
+
2002/08/21/big/img_398
|
| 2393 |
+
2002/08/17/big/img_340
|
| 2394 |
+
2002/08/14/big/img_474
|
| 2395 |
+
2002/08/13/big/img_294
|
| 2396 |
+
2002/08/24/big/img_840
|
| 2397 |
+
2002/08/09/big/img_808
|
| 2398 |
+
2002/08/23/big/img_491
|
| 2399 |
+
2002/07/28/big/img_33
|
| 2400 |
+
2003/01/13/big/img_664
|
| 2401 |
+
2002/08/02/big/img_261
|
| 2402 |
+
2002/08/09/big/img_591
|
| 2403 |
+
2002/07/26/big/img_309
|
| 2404 |
+
2003/01/14/big/img_372
|
| 2405 |
+
2002/08/19/big/img_581
|
| 2406 |
+
2002/08/19/big/img_168
|
| 2407 |
+
2002/08/26/big/img_422
|
| 2408 |
+
2002/07/24/big/img_106
|
| 2409 |
+
2002/08/01/big/img_1936
|
| 2410 |
+
2002/08/05/big/img_3764
|
| 2411 |
+
2002/08/21/big/img_266
|
| 2412 |
+
2002/08/31/big/img_17968
|
| 2413 |
+
2002/08/01/big/img_1941
|
| 2414 |
+
2002/08/15/big/img_550
|
| 2415 |
+
2002/08/14/big/img_13
|
| 2416 |
+
2002/07/30/big/img_171
|
| 2417 |
+
2003/01/13/big/img_490
|
| 2418 |
+
2002/07/25/big/img_427
|
| 2419 |
+
2002/07/19/big/img_770
|
| 2420 |
+
2002/08/12/big/img_759
|
| 2421 |
+
2003/01/15/big/img_1360
|
| 2422 |
+
2002/08/05/big/img_3692
|
| 2423 |
+
2003/01/16/big/img_30
|
| 2424 |
+
2002/07/25/big/img_1026
|
| 2425 |
+
2002/07/22/big/img_288
|
| 2426 |
+
2002/08/29/big/img_18801
|
| 2427 |
+
2002/07/24/big/img_793
|
| 2428 |
+
2002/08/13/big/img_178
|
| 2429 |
+
2002/08/06/big/img_2322
|
| 2430 |
+
2003/01/14/big/img_560
|
| 2431 |
+
2002/08/18/big/img_408
|
| 2432 |
+
2003/01/16/big/img_915
|
| 2433 |
+
2003/01/16/big/img_679
|
| 2434 |
+
2002/08/07/big/img_1552
|
| 2435 |
+
2002/08/29/big/img_19050
|
| 2436 |
+
2002/08/01/big/img_2172
|
| 2437 |
+
2002/07/31/big/img_30
|
| 2438 |
+
2002/07/30/big/img_1019
|
| 2439 |
+
2002/07/30/big/img_587
|
| 2440 |
+
2003/01/13/big/img_773
|
| 2441 |
+
2002/07/30/big/img_410
|
| 2442 |
+
2002/07/28/big/img_65
|
| 2443 |
+
2002/08/05/big/img_3138
|
| 2444 |
+
2002/07/23/big/img_541
|
| 2445 |
+
2002/08/22/big/img_963
|
| 2446 |
+
2002/07/27/big/img_657
|
| 2447 |
+
2002/07/30/big/img_1051
|
| 2448 |
+
2003/01/16/big/img_150
|
| 2449 |
+
2002/07/31/big/img_519
|
| 2450 |
+
2002/08/01/big/img_1961
|
| 2451 |
+
2002/08/05/big/img_3752
|
| 2452 |
+
2002/07/23/big/img_631
|
| 2453 |
+
2003/01/14/big/img_237
|
| 2454 |
+
2002/07/28/big/img_21
|
| 2455 |
+
2002/07/22/big/img_813
|
| 2456 |
+
2002/08/05/big/img_3563
|
| 2457 |
+
2003/01/17/big/img_620
|
| 2458 |
+
2002/07/19/big/img_523
|
| 2459 |
+
2002/07/30/big/img_904
|
| 2460 |
+
2002/08/29/big/img_18642
|
| 2461 |
+
2002/08/11/big/img_492
|
| 2462 |
+
2002/08/01/big/img_2130
|
| 2463 |
+
2002/07/25/big/img_618
|
| 2464 |
+
2002/08/17/big/img_305
|
| 2465 |
+
2003/01/16/big/img_520
|
| 2466 |
+
2002/07/26/big/img_495
|
| 2467 |
+
2002/08/17/big/img_164
|
| 2468 |
+
2002/08/03/big/img_440
|
| 2469 |
+
2002/07/24/big/img_441
|
| 2470 |
+
2002/08/06/big/img_2146
|
| 2471 |
+
2002/08/11/big/img_558
|
| 2472 |
+
2002/08/02/big/img_545
|
| 2473 |
+
2002/08/31/big/img_18090
|
| 2474 |
+
2003/01/01/big/img_136
|
| 2475 |
+
2002/07/25/big/img_1099
|
| 2476 |
+
2003/01/13/big/img_728
|
| 2477 |
+
2003/01/16/big/img_197
|
| 2478 |
+
2002/07/26/big/img_651
|
| 2479 |
+
2002/08/11/big/img_676
|
| 2480 |
+
2003/01/15/big/img_10
|
| 2481 |
+
2002/08/21/big/img_250
|
| 2482 |
+
2002/08/14/big/img_325
|
| 2483 |
+
2002/08/04/big/img_390
|
| 2484 |
+
2002/07/24/big/img_554
|
| 2485 |
+
2003/01/16/big/img_333
|
| 2486 |
+
2002/07/31/big/img_922
|
| 2487 |
+
2002/09/02/big/img_15586
|
| 2488 |
+
2003/01/16/big/img_184
|
| 2489 |
+
2002/07/22/big/img_766
|
| 2490 |
+
2002/07/21/big/img_608
|
| 2491 |
+
2002/08/07/big/img_1578
|
| 2492 |
+
2002/08/17/big/img_961
|
| 2493 |
+
2002/07/27/big/img_324
|
| 2494 |
+
2002/08/05/big/img_3765
|
| 2495 |
+
2002/08/23/big/img_462
|
| 2496 |
+
2003/01/16/big/img_382
|
| 2497 |
+
2002/08/27/big/img_19838
|
| 2498 |
+
2002/08/01/big/img_1505
|
| 2499 |
+
2002/08/21/big/img_662
|
| 2500 |
+
2002/08/14/big/img_605
|
| 2501 |
+
2002/08/19/big/img_816
|
| 2502 |
+
2002/07/29/big/img_136
|
| 2503 |
+
2002/08/20/big/img_719
|
| 2504 |
+
2002/08/06/big/img_2826
|
| 2505 |
+
2002/08/10/big/img_630
|
| 2506 |
+
2003/01/17/big/img_973
|
| 2507 |
+
2002/08/14/big/img_116
|
| 2508 |
+
2002/08/02/big/img_666
|
| 2509 |
+
2002/08/21/big/img_710
|
| 2510 |
+
2002/08/05/big/img_55
|
| 2511 |
+
2002/07/31/big/img_229
|
| 2512 |
+
2002/08/01/big/img_1549
|
| 2513 |
+
2002/07/23/big/img_432
|
| 2514 |
+
2002/07/21/big/img_430
|
| 2515 |
+
2002/08/21/big/img_549
|
| 2516 |
+
2002/08/08/big/img_985
|
| 2517 |
+
2002/07/20/big/img_610
|
| 2518 |
+
2002/07/23/big/img_978
|
| 2519 |
+
2002/08/23/big/img_219
|
| 2520 |
+
2002/07/25/big/img_175
|
| 2521 |
+
2003/01/15/big/img_230
|
| 2522 |
+
2002/08/23/big/img_385
|
| 2523 |
+
2002/07/31/big/img_879
|
| 2524 |
+
2002/08/12/big/img_495
|
| 2525 |
+
2002/08/22/big/img_499
|
| 2526 |
+
2002/08/30/big/img_18322
|
| 2527 |
+
2002/08/15/big/img_795
|
| 2528 |
+
2002/08/13/big/img_835
|
| 2529 |
+
2003/01/17/big/img_930
|
| 2530 |
+
2002/07/30/big/img_873
|
| 2531 |
+
2002/08/11/big/img_257
|
| 2532 |
+
2002/07/31/big/img_593
|
| 2533 |
+
2002/08/21/big/img_916
|
| 2534 |
+
2003/01/13/big/img_814
|
| 2535 |
+
2002/07/25/big/img_722
|
| 2536 |
+
2002/08/16/big/img_379
|
| 2537 |
+
2002/07/31/big/img_497
|
| 2538 |
+
2002/07/22/big/img_602
|
| 2539 |
+
2002/08/21/big/img_642
|
| 2540 |
+
2002/08/21/big/img_614
|
| 2541 |
+
2002/08/23/big/img_482
|
| 2542 |
+
2002/07/29/big/img_603
|
| 2543 |
+
2002/08/13/big/img_705
|
| 2544 |
+
2002/07/23/big/img_833
|
| 2545 |
+
2003/01/14/big/img_511
|
| 2546 |
+
2002/07/24/big/img_376
|
| 2547 |
+
2002/08/17/big/img_1030
|
| 2548 |
+
2002/08/05/big/img_3576
|
| 2549 |
+
2002/08/16/big/img_540
|
| 2550 |
+
2002/07/22/big/img_630
|
| 2551 |
+
2002/08/10/big/img_180
|
| 2552 |
+
2002/08/14/big/img_905
|
| 2553 |
+
2002/08/29/big/img_18777
|
| 2554 |
+
2002/08/22/big/img_693
|
| 2555 |
+
2003/01/16/big/img_933
|
| 2556 |
+
2002/08/20/big/img_555
|
| 2557 |
+
2002/08/15/big/img_549
|
| 2558 |
+
2003/01/14/big/img_830
|
| 2559 |
+
2003/01/16/big/img_64
|
| 2560 |
+
2002/08/27/big/img_19670
|
| 2561 |
+
2002/08/22/big/img_729
|
| 2562 |
+
2002/07/27/big/img_981
|
| 2563 |
+
2002/08/09/big/img_458
|
| 2564 |
+
2003/01/17/big/img_884
|
| 2565 |
+
2002/07/25/big/img_639
|
| 2566 |
+
2002/08/31/big/img_18008
|
| 2567 |
+
2002/08/22/big/img_249
|
| 2568 |
+
2002/08/17/big/img_971
|
| 2569 |
+
2002/08/04/big/img_308
|
| 2570 |
+
2002/07/28/big/img_362
|
| 2571 |
+
2002/08/12/big/img_142
|
| 2572 |
+
2002/08/26/big/img_61
|
| 2573 |
+
2002/08/14/big/img_422
|
| 2574 |
+
2002/07/19/big/img_607
|
| 2575 |
+
2003/01/15/big/img_717
|
| 2576 |
+
2002/08/01/big/img_1475
|
| 2577 |
+
2002/08/29/big/img_19061
|
| 2578 |
+
2003/01/01/big/img_346
|
| 2579 |
+
2002/07/20/big/img_315
|
| 2580 |
+
2003/01/15/big/img_756
|
| 2581 |
+
2002/08/15/big/img_879
|
| 2582 |
+
2002/08/08/big/img_615
|
| 2583 |
+
2003/01/13/big/img_431
|
| 2584 |
+
2002/08/05/big/img_3233
|
| 2585 |
+
2002/08/24/big/img_526
|
| 2586 |
+
2003/01/13/big/img_717
|
| 2587 |
+
2002/09/01/big/img_16408
|
| 2588 |
+
2002/07/22/big/img_217
|
| 2589 |
+
2002/07/31/big/img_960
|
| 2590 |
+
2002/08/21/big/img_610
|
| 2591 |
+
2002/08/05/big/img_3753
|
| 2592 |
+
2002/08/03/big/img_151
|
| 2593 |
+
2002/08/21/big/img_267
|
| 2594 |
+
2002/08/01/big/img_2175
|
| 2595 |
+
2002/08/04/big/img_556
|
| 2596 |
+
2002/08/21/big/img_527
|
| 2597 |
+
2002/09/02/big/img_15800
|
| 2598 |
+
2002/07/27/big/img_156
|
| 2599 |
+
2002/07/20/big/img_590
|
| 2600 |
+
2002/08/15/big/img_700
|
| 2601 |
+
2002/08/08/big/img_444
|
| 2602 |
+
2002/07/25/big/img_94
|
| 2603 |
+
2002/07/24/big/img_778
|
| 2604 |
+
2002/08/14/big/img_694
|
| 2605 |
+
2002/07/20/big/img_666
|
| 2606 |
+
2002/08/02/big/img_200
|
| 2607 |
+
2002/08/02/big/img_578
|
| 2608 |
+
2003/01/17/big/img_332
|
| 2609 |
+
2002/09/01/big/img_16352
|
| 2610 |
+
2002/08/27/big/img_19668
|
| 2611 |
+
2002/07/23/big/img_823
|
| 2612 |
+
2002/08/13/big/img_431
|
| 2613 |
+
2003/01/16/big/img_463
|
| 2614 |
+
2002/08/27/big/img_19711
|
| 2615 |
+
2002/08/23/big/img_154
|
| 2616 |
+
2002/07/31/big/img_360
|
| 2617 |
+
2002/08/23/big/img_555
|
| 2618 |
+
2002/08/10/big/img_561
|
| 2619 |
+
2003/01/14/big/img_550
|
| 2620 |
+
2002/08/07/big/img_1370
|
| 2621 |
+
2002/07/30/big/img_1184
|
| 2622 |
+
2002/08/01/big/img_1445
|
| 2623 |
+
2002/08/23/big/img_22
|
| 2624 |
+
2002/07/30/big/img_606
|
| 2625 |
+
2003/01/17/big/img_271
|
| 2626 |
+
2002/08/31/big/img_17316
|
| 2627 |
+
2002/08/16/big/img_973
|
| 2628 |
+
2002/07/26/big/img_77
|
| 2629 |
+
2002/07/20/big/img_788
|
| 2630 |
+
2002/08/06/big/img_2426
|
| 2631 |
+
2002/08/07/big/img_1498
|
| 2632 |
+
2002/08/16/big/img_358
|
| 2633 |
+
2002/08/06/big/img_2851
|
| 2634 |
+
2002/08/12/big/img_359
|
| 2635 |
+
2002/08/01/big/img_1521
|
| 2636 |
+
2002/08/02/big/img_709
|
| 2637 |
+
2002/08/20/big/img_935
|
| 2638 |
+
2002/08/12/big/img_188
|
| 2639 |
+
2002/08/24/big/img_411
|
| 2640 |
+
2002/08/22/big/img_680
|
| 2641 |
+
2002/08/06/big/img_2480
|
| 2642 |
+
2002/07/20/big/img_627
|
| 2643 |
+
2002/07/30/big/img_214
|
| 2644 |
+
2002/07/25/big/img_354
|
| 2645 |
+
2002/08/02/big/img_636
|
| 2646 |
+
2003/01/15/big/img_661
|
| 2647 |
+
2002/08/07/big/img_1327
|
| 2648 |
+
2002/08/01/big/img_2108
|
| 2649 |
+
2002/08/31/big/img_17919
|
| 2650 |
+
2002/08/29/big/img_18768
|
| 2651 |
+
2002/08/05/big/img_3840
|
| 2652 |
+
2002/07/26/big/img_242
|
| 2653 |
+
2003/01/14/big/img_451
|
| 2654 |
+
2002/08/20/big/img_923
|
| 2655 |
+
2002/08/27/big/img_19908
|
| 2656 |
+
2002/08/16/big/img_282
|
| 2657 |
+
2002/08/19/big/img_440
|
| 2658 |
+
2003/01/01/big/img_230
|
| 2659 |
+
2002/08/08/big/img_212
|
| 2660 |
+
2002/07/20/big/img_443
|
| 2661 |
+
2002/08/25/big/img_635
|
| 2662 |
+
2003/01/13/big/img_1169
|
| 2663 |
+
2002/07/26/big/img_998
|
| 2664 |
+
2002/08/15/big/img_995
|
| 2665 |
+
2002/08/06/big/img_3002
|
| 2666 |
+
2002/07/29/big/img_460
|
| 2667 |
+
2003/01/14/big/img_925
|
| 2668 |
+
2002/07/23/big/img_539
|
| 2669 |
+
2002/08/16/big/img_694
|
| 2670 |
+
2003/01/13/big/img_459
|
| 2671 |
+
2002/07/23/big/img_249
|
| 2672 |
+
2002/08/20/big/img_539
|
| 2673 |
+
2002/08/04/big/img_186
|
| 2674 |
+
2002/08/26/big/img_264
|
| 2675 |
+
2002/07/22/big/img_704
|
| 2676 |
+
2002/08/25/big/img_277
|
| 2677 |
+
2002/08/22/big/img_988
|
| 2678 |
+
2002/07/29/big/img_504
|
| 2679 |
+
2002/08/05/big/img_3600
|
| 2680 |
+
2002/08/30/big/img_18380
|
| 2681 |
+
2003/01/14/big/img_937
|
| 2682 |
+
2002/08/21/big/img_254
|
| 2683 |
+
2002/08/10/big/img_130
|
| 2684 |
+
2002/08/20/big/img_339
|
| 2685 |
+
2003/01/14/big/img_428
|
| 2686 |
+
2002/08/20/big/img_889
|
| 2687 |
+
2002/08/31/big/img_17637
|
| 2688 |
+
2002/07/26/big/img_644
|
| 2689 |
+
2002/09/01/big/img_16776
|
| 2690 |
+
2002/08/06/big/img_2239
|
| 2691 |
+
2002/08/06/big/img_2646
|
| 2692 |
+
2003/01/13/big/img_491
|
| 2693 |
+
2002/08/10/big/img_579
|
| 2694 |
+
2002/08/21/big/img_713
|
| 2695 |
+
2002/08/22/big/img_482
|
| 2696 |
+
2002/07/22/big/img_167
|
| 2697 |
+
2002/07/24/big/img_539
|
| 2698 |
+
2002/08/14/big/img_721
|
| 2699 |
+
2002/07/25/big/img_389
|
| 2700 |
+
2002/09/01/big/img_16591
|
| 2701 |
+
2002/08/13/big/img_543
|
| 2702 |
+
2003/01/14/big/img_432
|
| 2703 |
+
2002/08/09/big/img_287
|
| 2704 |
+
2002/07/26/big/img_126
|
| 2705 |
+
2002/08/23/big/img_412
|
| 2706 |
+
2002/08/15/big/img_1034
|
| 2707 |
+
2002/08/28/big/img_19485
|
| 2708 |
+
2002/07/31/big/img_236
|
| 2709 |
+
2002/07/30/big/img_523
|
| 2710 |
+
2002/07/19/big/img_141
|
| 2711 |
+
2003/01/17/big/img_957
|
| 2712 |
+
2002/08/04/big/img_81
|
| 2713 |
+
2002/07/25/big/img_206
|
| 2714 |
+
2002/08/15/big/img_716
|
| 2715 |
+
2002/08/13/big/img_403
|
| 2716 |
+
2002/08/15/big/img_685
|
| 2717 |
+
2002/07/26/big/img_884
|
| 2718 |
+
2002/07/19/big/img_499
|
| 2719 |
+
2002/07/23/big/img_772
|
| 2720 |
+
2002/07/27/big/img_752
|
| 2721 |
+
2003/01/14/big/img_493
|
| 2722 |
+
2002/08/25/big/img_664
|
| 2723 |
+
2002/07/31/big/img_334
|
| 2724 |
+
2002/08/26/big/img_678
|
| 2725 |
+
2002/09/01/big/img_16541
|
| 2726 |
+
2003/01/14/big/img_347
|
| 2727 |
+
2002/07/23/big/img_187
|
| 2728 |
+
2002/07/30/big/img_1163
|
| 2729 |
+
2002/08/05/big/img_35
|
| 2730 |
+
2002/08/22/big/img_944
|
| 2731 |
+
2002/08/07/big/img_1239
|
| 2732 |
+
2002/07/29/big/img_1215
|
| 2733 |
+
2002/08/03/big/img_312
|
| 2734 |
+
2002/08/05/big/img_3523
|
| 2735 |
+
2002/07/29/big/img_218
|
| 2736 |
+
2002/08/13/big/img_672
|
| 2737 |
+
2002/08/16/big/img_205
|
| 2738 |
+
2002/08/17/big/img_594
|
| 2739 |
+
2002/07/29/big/img_1411
|
| 2740 |
+
2002/07/30/big/img_942
|
| 2741 |
+
2003/01/16/big/img_312
|
| 2742 |
+
2002/08/08/big/img_312
|
| 2743 |
+
2002/07/25/big/img_15
|
| 2744 |
+
2002/08/09/big/img_839
|
| 2745 |
+
2002/08/01/big/img_2069
|
| 2746 |
+
2002/08/31/big/img_17512
|
| 2747 |
+
2002/08/01/big/img_3
|
| 2748 |
+
2002/07/31/big/img_320
|
| 2749 |
+
2003/01/15/big/img_1265
|
| 2750 |
+
2002/08/14/big/img_563
|
| 2751 |
+
2002/07/31/big/img_167
|
| 2752 |
+
2002/08/20/big/img_374
|
| 2753 |
+
2002/08/13/big/img_406
|
| 2754 |
+
2002/08/08/big/img_625
|
| 2755 |
+
2002/08/02/big/img_314
|
| 2756 |
+
2002/08/27/big/img_19964
|
| 2757 |
+
2002/09/01/big/img_16670
|
| 2758 |
+
2002/07/31/big/img_599
|
| 2759 |
+
2002/08/29/big/img_18906
|
| 2760 |
+
2002/07/24/big/img_373
|
| 2761 |
+
2002/07/26/big/img_513
|
| 2762 |
+
2002/09/02/big/img_15497
|
| 2763 |
+
2002/08/19/big/img_117
|
| 2764 |
+
2003/01/01/big/img_158
|
| 2765 |
+
2002/08/24/big/img_178
|
| 2766 |
+
2003/01/13/big/img_935
|
| 2767 |
+
2002/08/13/big/img_609
|
| 2768 |
+
2002/08/30/big/img_18341
|
| 2769 |
+
2002/08/25/big/img_674
|
| 2770 |
+
2003/01/13/big/img_209
|
| 2771 |
+
2002/08/13/big/img_258
|
| 2772 |
+
2002/08/05/big/img_3543
|
| 2773 |
+
2002/08/07/big/img_1970
|
| 2774 |
+
2002/08/06/big/img_3004
|
| 2775 |
+
2003/01/17/big/img_487
|
| 2776 |
+
2002/08/24/big/img_873
|
| 2777 |
+
2002/08/29/big/img_18730
|
| 2778 |
+
2002/08/09/big/img_375
|
| 2779 |
+
2003/01/16/big/img_751
|
| 2780 |
+
2002/08/02/big/img_603
|
| 2781 |
+
2002/08/19/big/img_325
|
| 2782 |
+
2002/09/01/big/img_16420
|
| 2783 |
+
2002/08/05/big/img_3633
|
| 2784 |
+
2002/08/21/big/img_516
|
| 2785 |
+
2002/07/19/big/img_501
|
| 2786 |
+
2002/07/26/big/img_688
|
| 2787 |
+
2002/07/24/big/img_256
|
| 2788 |
+
2002/07/25/big/img_438
|
| 2789 |
+
2002/07/31/big/img_1017
|
| 2790 |
+
2002/08/22/big/img_512
|
| 2791 |
+
2002/07/21/big/img_543
|
| 2792 |
+
2002/08/08/big/img_223
|
| 2793 |
+
2002/08/19/big/img_189
|
| 2794 |
+
2002/08/12/big/img_630
|
| 2795 |
+
2002/07/30/big/img_958
|
| 2796 |
+
2002/07/28/big/img_208
|
| 2797 |
+
2002/08/31/big/img_17691
|
| 2798 |
+
2002/07/22/big/img_542
|
| 2799 |
+
2002/07/19/big/img_741
|
| 2800 |
+
2002/07/19/big/img_158
|
| 2801 |
+
2002/08/15/big/img_399
|
| 2802 |
+
2002/08/01/big/img_2159
|
| 2803 |
+
2002/08/14/big/img_455
|
| 2804 |
+
2002/08/17/big/img_1011
|
| 2805 |
+
2002/08/26/big/img_744
|
| 2806 |
+
2002/08/12/big/img_624
|
| 2807 |
+
2003/01/17/big/img_821
|
| 2808 |
+
2002/08/16/big/img_980
|
| 2809 |
+
2002/07/28/big/img_281
|
| 2810 |
+
2002/07/25/big/img_171
|
| 2811 |
+
2002/08/03/big/img_116
|
| 2812 |
+
2002/07/22/big/img_467
|
| 2813 |
+
2002/07/31/big/img_750
|
| 2814 |
+
2002/07/26/big/img_435
|
| 2815 |
+
2002/07/19/big/img_822
|
| 2816 |
+
2002/08/13/big/img_626
|
| 2817 |
+
2002/08/11/big/img_344
|
| 2818 |
+
2002/08/02/big/img_473
|
| 2819 |
+
2002/09/01/big/img_16817
|
| 2820 |
+
2002/08/01/big/img_1275
|
| 2821 |
+
2002/08/28/big/img_19270
|
| 2822 |
+
2002/07/23/big/img_607
|
| 2823 |
+
2002/08/09/big/img_316
|
| 2824 |
+
2002/07/29/big/img_626
|
| 2825 |
+
2002/07/24/big/img_824
|
| 2826 |
+
2002/07/22/big/img_342
|
| 2827 |
+
2002/08/08/big/img_794
|
| 2828 |
+
2002/08/07/big/img_1209
|
| 2829 |
+
2002/07/19/big/img_18
|
| 2830 |
+
2002/08/25/big/img_634
|
| 2831 |
+
2002/07/24/big/img_730
|
| 2832 |
+
2003/01/17/big/img_356
|
| 2833 |
+
2002/07/23/big/img_305
|
| 2834 |
+
2002/07/30/big/img_453
|
| 2835 |
+
2003/01/13/big/img_972
|
| 2836 |
+
2002/08/06/big/img_2610
|
| 2837 |
+
2002/08/29/big/img_18920
|
| 2838 |
+
2002/07/31/big/img_123
|
| 2839 |
+
2002/07/26/big/img_979
|
| 2840 |
+
2002/08/24/big/img_635
|
| 2841 |
+
2002/08/05/big/img_3704
|
| 2842 |
+
2002/08/07/big/img_1358
|
| 2843 |
+
2002/07/22/big/img_306
|
| 2844 |
+
2002/08/13/big/img_619
|
| 2845 |
+
2002/08/02/big/img_366
|
third_party/GPEN/face_detect/data/__init__.py
ADDED
@@ -0,0 +1,3 @@
+from .wider_face import WiderFaceDetection, detection_collate
+from .data_augment import *
+from .config import *
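The package __init__ above simply re-exports the WIDER FACE dataset wrapper, the augmentation helpers, and the detector configs so callers can import everything from one place. A minimal usage sketch, assuming the face_detect directory is on sys.path (illustrative only, not part of this commit):

from data import WiderFaceDetection, detection_collate  # re-exported by the __init__ above
dataset = WiderFaceDetection(txt_path='path/to/wider_face_train_label.txt')  # label path is an assumed example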
third_party/GPEN/face_detect/data/config.py
ADDED
@@ -0,0 +1,42 @@
+# config.py
+
+cfg_mnet = {
+    'name': 'mobilenet0.25',
+    'min_sizes': [[16, 32], [64, 128], [256, 512]],
+    'steps': [8, 16, 32],
+    'variance': [0.1, 0.2],
+    'clip': False,
+    'loc_weight': 2.0,
+    'gpu_train': True,
+    'batch_size': 32,
+    'ngpu': 1,
+    'epoch': 250,
+    'decay1': 190,
+    'decay2': 220,
+    'image_size': 640,
+    'pretrain': False,
+    'return_layers': {'stage1': 1, 'stage2': 2, 'stage3': 3},
+    'in_channel': 32,
+    'out_channel': 64
+}
+
+cfg_re50 = {
+    'name': 'Resnet50',
+    'min_sizes': [[16, 32], [64, 128], [256, 512]],
+    'steps': [8, 16, 32],
+    'variance': [0.1, 0.2],
+    'clip': False,
+    'loc_weight': 2.0,
+    'gpu_train': True,
+    'batch_size': 24,
+    'ngpu': 4,
+    'epoch': 100,
+    'decay1': 70,
+    'decay2': 90,
+    'image_size': 840,
+    'pretrain': False,
+    'return_layers': {'layer2': 1, 'layer3': 2, 'layer4': 3},
+    'in_channel': 256,
+    'out_channel': 256
+}
+
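The two dictionaries above are the RetinaFace detector configurations for the MobileNet-0.25 and ResNet-50 backbones. A minimal sketch of how one of them is typically picked by backbone name; the select_config helper and the 'mobile0.25' / 'resnet50' strings are assumptions for illustration and not part of this commit (only cfg_mnet and cfg_re50 come from config.py above, again assuming face_detect is on sys.path):

from data.config import cfg_mnet, cfg_re50  # added by this commit

def select_config(network: str) -> dict:
    # Pick the config dict that matches the requested backbone.
    if network == 'mobile0.25':
        return cfg_mnet
    if network == 'resnet50':
        return cfg_re50
    raise ValueError('unknown backbone: ' + network)

cfg = select_config('mobile0.25')
print(cfg['image_size'], cfg['steps'])  # 640 [8, 16, 32]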
third_party/GPEN/face_detect/data/data_augment.py
ADDED
@@ -0,0 +1,237 @@
import cv2
import numpy as np
import random
from utils.box_utils import matrix_iof


def _crop(image, boxes, labels, landm, img_dim):
    height, width, _ = image.shape
    pad_image_flag = True

    for _ in range(250):
        """
        if random.uniform(0, 1) <= 0.2:
            scale = 1.0
        else:
            scale = random.uniform(0.3, 1.0)
        """
        PRE_SCALES = [0.3, 0.45, 0.6, 0.8, 1.0]
        scale = random.choice(PRE_SCALES)
        short_side = min(width, height)
        w = int(scale * short_side)
        h = w

        if width == w:
            l = 0
        else:
            l = random.randrange(width - w)
        if height == h:
            t = 0
        else:
            t = random.randrange(height - h)
        roi = np.array((l, t, l + w, t + h))

        value = matrix_iof(boxes, roi[np.newaxis])
        flag = (value >= 1)
        if not flag.any():
            continue

        centers = (boxes[:, :2] + boxes[:, 2:]) / 2
        mask_a = np.logical_and(roi[:2] < centers, centers < roi[2:]).all(axis=1)
        boxes_t = boxes[mask_a].copy()
        labels_t = labels[mask_a].copy()
        landms_t = landm[mask_a].copy()
        landms_t = landms_t.reshape([-1, 5, 2])

        if boxes_t.shape[0] == 0:
            continue

        image_t = image[roi[1]:roi[3], roi[0]:roi[2]]

        boxes_t[:, :2] = np.maximum(boxes_t[:, :2], roi[:2])
        boxes_t[:, :2] -= roi[:2]
        boxes_t[:, 2:] = np.minimum(boxes_t[:, 2:], roi[2:])
        boxes_t[:, 2:] -= roi[:2]

        # landm
        landms_t[:, :, :2] = landms_t[:, :, :2] - roi[:2]
        landms_t[:, :, :2] = np.maximum(landms_t[:, :, :2], np.array([0, 0]))
        landms_t[:, :, :2] = np.minimum(landms_t[:, :, :2], roi[2:] - roi[:2])
        landms_t = landms_t.reshape([-1, 10])

        # make sure that the cropped image contains at least one face > 16 pixel at training image scale
        b_w_t = (boxes_t[:, 2] - boxes_t[:, 0] + 1) / w * img_dim
        b_h_t = (boxes_t[:, 3] - boxes_t[:, 1] + 1) / h * img_dim
        mask_b = np.minimum(b_w_t, b_h_t) > 0.0
        boxes_t = boxes_t[mask_b]
        labels_t = labels_t[mask_b]
        landms_t = landms_t[mask_b]

        if boxes_t.shape[0] == 0:
            continue

        pad_image_flag = False

        return image_t, boxes_t, labels_t, landms_t, pad_image_flag
    return image, boxes, labels, landm, pad_image_flag


def _distort(image):

    def _convert(image, alpha=1, beta=0):
        tmp = image.astype(float) * alpha + beta
        tmp[tmp < 0] = 0
        tmp[tmp > 255] = 255
        image[:] = tmp

    image = image.copy()

    if random.randrange(2):

        # brightness distortion
        if random.randrange(2):
            _convert(image, beta=random.uniform(-32, 32))

        # contrast distortion
        if random.randrange(2):
            _convert(image, alpha=random.uniform(0.5, 1.5))

        image = cv2.cvtColor(image, cv2.COLOR_BGR2HSV)

        # saturation distortion
        if random.randrange(2):
            _convert(image[:, :, 1], alpha=random.uniform(0.5, 1.5))

        # hue distortion
        if random.randrange(2):
            tmp = image[:, :, 0].astype(int) + random.randint(-18, 18)
            tmp %= 180
            image[:, :, 0] = tmp

        image = cv2.cvtColor(image, cv2.COLOR_HSV2BGR)

    else:

        # brightness distortion
        if random.randrange(2):
            _convert(image, beta=random.uniform(-32, 32))

        image = cv2.cvtColor(image, cv2.COLOR_BGR2HSV)

        # saturation distortion
        if random.randrange(2):
            _convert(image[:, :, 1], alpha=random.uniform(0.5, 1.5))

        # hue distortion
        if random.randrange(2):
            tmp = image[:, :, 0].astype(int) + random.randint(-18, 18)
            tmp %= 180
            image[:, :, 0] = tmp

        image = cv2.cvtColor(image, cv2.COLOR_HSV2BGR)

        # contrast distortion
        if random.randrange(2):
            _convert(image, alpha=random.uniform(0.5, 1.5))

    return image


def _expand(image, boxes, fill, p):
    if random.randrange(2):
        return image, boxes

    height, width, depth = image.shape

    scale = random.uniform(1, p)
    w = int(scale * width)
    h = int(scale * height)

    left = random.randint(0, w - width)
    top = random.randint(0, h - height)

    boxes_t = boxes.copy()
    boxes_t[:, :2] += (left, top)
    boxes_t[:, 2:] += (left, top)
    expand_image = np.empty(
        (h, w, depth),
        dtype=image.dtype)
    expand_image[:, :] = fill
    expand_image[top:top + height, left:left + width] = image
    image = expand_image

    return image, boxes_t


def _mirror(image, boxes, landms):
    _, width, _ = image.shape
    if random.randrange(2):
        image = image[:, ::-1]
        boxes = boxes.copy()
        boxes[:, 0::2] = width - boxes[:, 2::-2]

        # landm
        landms = landms.copy()
        landms = landms.reshape([-1, 5, 2])
        landms[:, :, 0] = width - landms[:, :, 0]
        tmp = landms[:, 1, :].copy()
        landms[:, 1, :] = landms[:, 0, :]
        landms[:, 0, :] = tmp
        tmp1 = landms[:, 4, :].copy()
        landms[:, 4, :] = landms[:, 3, :]
        landms[:, 3, :] = tmp1
        landms = landms.reshape([-1, 10])

    return image, boxes, landms


def _pad_to_square(image, rgb_mean, pad_image_flag):
    if not pad_image_flag:
        return image
    height, width, _ = image.shape
    long_side = max(width, height)
    image_t = np.empty((long_side, long_side, 3), dtype=image.dtype)
    image_t[:, :] = rgb_mean
    image_t[0:0 + height, 0:0 + width] = image
    return image_t


def _resize_subtract_mean(image, insize, rgb_mean):
    interp_methods = [cv2.INTER_LINEAR, cv2.INTER_CUBIC, cv2.INTER_AREA, cv2.INTER_NEAREST, cv2.INTER_LANCZOS4]
    interp_method = interp_methods[random.randrange(5)]
    image = cv2.resize(image, (insize, insize), interpolation=interp_method)
    image = image.astype(np.float32)
    image -= rgb_mean
    return image.transpose(2, 0, 1)


class preproc(object):

    def __init__(self, img_dim, rgb_means):
        self.img_dim = img_dim
        self.rgb_means = rgb_means

    def __call__(self, image, targets):
        assert targets.shape[0] > 0, "this image does not have gt"

        boxes = targets[:, :4].copy()
        labels = targets[:, -1].copy()
        landm = targets[:, 4:-1].copy()

        image_t, boxes_t, labels_t, landm_t, pad_image_flag = _crop(image, boxes, labels, landm, self.img_dim)
        image_t = _distort(image_t)
        image_t = _pad_to_square(image_t, self.rgb_means, pad_image_flag)
        image_t, boxes_t, landm_t = _mirror(image_t, boxes_t, landm_t)
        height, width, _ = image_t.shape
        image_t = _resize_subtract_mean(image_t, self.img_dim, self.rgb_means)
        boxes_t[:, 0::2] /= width
        boxes_t[:, 1::2] /= height

        landm_t[:, 0::2] /= width
        landm_t[:, 1::2] /= height

        labels_t = np.expand_dims(labels_t, 1)
        targets_t = np.hstack((boxes_t, landm_t, labels_t))

        return image_t, targets_t
third_party/GPEN/face_detect/data/wider_face.py
ADDED
@@ -0,0 +1,101 @@
import os
import os.path
import sys
import torch
import torch.utils.data as data
import cv2
import numpy as np

class WiderFaceDetection(data.Dataset):
    def __init__(self, txt_path, preproc=None):
        self.preproc = preproc
        self.imgs_path = []
        self.words = []
        f = open(txt_path, 'r')
        lines = f.readlines()
        isFirst = True
        labels = []
        for line in lines:
            line = line.rstrip()
            if line.startswith('#'):
                if isFirst is True:
                    isFirst = False
                else:
                    labels_copy = labels.copy()
                    self.words.append(labels_copy)
                    labels.clear()
                path = line[2:]
                path = txt_path.replace('label.txt', 'images/') + path
                self.imgs_path.append(path)
            else:
                line = line.split(' ')
                label = [float(x) for x in line]
                labels.append(label)

        self.words.append(labels)

    def __len__(self):
        return len(self.imgs_path)

    def __getitem__(self, index):
        img = cv2.imread(self.imgs_path[index])
        height, width, _ = img.shape

        labels = self.words[index]
        annotations = np.zeros((0, 15))
        if len(labels) == 0:
            return annotations
        for idx, label in enumerate(labels):
            annotation = np.zeros((1, 15))
            # bbox
            annotation[0, 0] = label[0]             # x1
            annotation[0, 1] = label[1]             # y1
            annotation[0, 2] = label[0] + label[2]  # x2
            annotation[0, 3] = label[1] + label[3]  # y2

            # landmarks
            annotation[0, 4] = label[4]    # l0_x
            annotation[0, 5] = label[5]    # l0_y
            annotation[0, 6] = label[7]    # l1_x
            annotation[0, 7] = label[8]    # l1_y
            annotation[0, 8] = label[10]   # l2_x
            annotation[0, 9] = label[11]   # l2_y
            annotation[0, 10] = label[13]  # l3_x
            annotation[0, 11] = label[14]  # l3_y
            annotation[0, 12] = label[16]  # l4_x
            annotation[0, 13] = label[17]  # l4_y
            if (annotation[0, 4] < 0):
                annotation[0, 14] = -1
            else:
                annotation[0, 14] = 1

            annotations = np.append(annotations, annotation, axis=0)
        target = np.array(annotations)
        if self.preproc is not None:
            img, target = self.preproc(img, target)

        return torch.from_numpy(img), target

def detection_collate(batch):
    """Custom collate fn for dealing with batches of images that have a different
    number of associated object annotations (bounding boxes).

    Arguments:
        batch: (tuple) A tuple of tensor images and lists of annotations

    Return:
        A tuple containing:
            1) (tensor) batch of images stacked on their 0 dim
            2) (list of tensors) annotations for a given image are stacked on 0 dim
    """
    targets = []
    imgs = []
    for _, sample in enumerate(batch):
        for _, tup in enumerate(sample):
            if torch.is_tensor(tup):
                imgs.append(tup)
            elif isinstance(tup, type(np.empty(0))):
                annos = torch.from_numpy(tup).float()
                targets.append(annos)

    return (torch.stack(imgs, 0), targets)
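A minimal sketch of how this dataset is typically wired to a DataLoader for training, assuming face_detect/ and face_detect/data/ are on sys.path and that a WIDER-FACE-style label.txt sits next to an images/ folder; the path and hyperparameters below are placeholders, not taken from this commit:

# A minimal sketch, not part of the committed files; paths and values are illustrative.
import torch.utils.data as data
from wider_face import WiderFaceDetection, detection_collate
from data_augment import preproc

dataset = WiderFaceDetection('./widerface/train/label.txt',
                             preproc(img_dim=640, rgb_means=(104, 117, 123)))
loader = data.DataLoader(dataset, batch_size=32, shuffle=True,
                         num_workers=4, collate_fn=detection_collate)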
third_party/GPEN/face_detect/facemodels/__init__.py
ADDED
File without changes
third_party/GPEN/face_detect/facemodels/net.py
ADDED
@@ -0,0 +1,137 @@
import time
import torch
import torch.nn as nn
import torchvision.models._utils as _utils
import torchvision.models as models
import torch.nn.functional as F
from torch.autograd import Variable

def conv_bn(inp, oup, stride = 1, leaky = 0):
    return nn.Sequential(
        nn.Conv2d(inp, oup, 3, stride, 1, bias=False),
        nn.BatchNorm2d(oup),
        nn.LeakyReLU(negative_slope=leaky, inplace=True)
    )

def conv_bn_no_relu(inp, oup, stride):
    return nn.Sequential(
        nn.Conv2d(inp, oup, 3, stride, 1, bias=False),
        nn.BatchNorm2d(oup),
    )

def conv_bn1X1(inp, oup, stride, leaky=0):
    return nn.Sequential(
        nn.Conv2d(inp, oup, 1, stride, padding=0, bias=False),
        nn.BatchNorm2d(oup),
        nn.LeakyReLU(negative_slope=leaky, inplace=True)
    )

def conv_dw(inp, oup, stride, leaky=0.1):
    return nn.Sequential(
        nn.Conv2d(inp, inp, 3, stride, 1, groups=inp, bias=False),
        nn.BatchNorm2d(inp),
        nn.LeakyReLU(negative_slope= leaky,inplace=True),

        nn.Conv2d(inp, oup, 1, 1, 0, bias=False),
        nn.BatchNorm2d(oup),
        nn.LeakyReLU(negative_slope= leaky,inplace=True),
    )

class SSH(nn.Module):
    def __init__(self, in_channel, out_channel):
        super(SSH, self).__init__()
        assert out_channel % 4 == 0
        leaky = 0
        if (out_channel <= 64):
            leaky = 0.1
        self.conv3X3 = conv_bn_no_relu(in_channel, out_channel//2, stride=1)

        self.conv5X5_1 = conv_bn(in_channel, out_channel//4, stride=1, leaky = leaky)
        self.conv5X5_2 = conv_bn_no_relu(out_channel//4, out_channel//4, stride=1)

        self.conv7X7_2 = conv_bn(out_channel//4, out_channel//4, stride=1, leaky = leaky)
        self.conv7x7_3 = conv_bn_no_relu(out_channel//4, out_channel//4, stride=1)

    def forward(self, input):
        conv3X3 = self.conv3X3(input)

        conv5X5_1 = self.conv5X5_1(input)
        conv5X5 = self.conv5X5_2(conv5X5_1)

        conv7X7_2 = self.conv7X7_2(conv5X5_1)
        conv7X7 = self.conv7x7_3(conv7X7_2)

        out = torch.cat([conv3X3, conv5X5, conv7X7], dim=1)
        out = F.relu(out)
        return out

class FPN(nn.Module):
    def __init__(self,in_channels_list,out_channels):
        super(FPN,self).__init__()
        leaky = 0
        if (out_channels <= 64):
            leaky = 0.1
        self.output1 = conv_bn1X1(in_channels_list[0], out_channels, stride = 1, leaky = leaky)
        self.output2 = conv_bn1X1(in_channels_list[1], out_channels, stride = 1, leaky = leaky)
        self.output3 = conv_bn1X1(in_channels_list[2], out_channels, stride = 1, leaky = leaky)

        self.merge1 = conv_bn(out_channels, out_channels, leaky = leaky)
        self.merge2 = conv_bn(out_channels, out_channels, leaky = leaky)

    def forward(self, input):
        # names = list(input.keys())
        input = list(input.values())

        output1 = self.output1(input[0])
        output2 = self.output2(input[1])
        output3 = self.output3(input[2])

        up3 = F.interpolate(output3, size=[output2.size(2), output2.size(3)], mode="nearest")
        output2 = output2 + up3
        output2 = self.merge2(output2)

        up2 = F.interpolate(output2, size=[output1.size(2), output1.size(3)], mode="nearest")
        output1 = output1 + up2
        output1 = self.merge1(output1)

        out = [output1, output2, output3]
        return out



class MobileNetV1(nn.Module):
    def __init__(self):
        super(MobileNetV1, self).__init__()
        self.stage1 = nn.Sequential(
            conv_bn(3, 8, 2, leaky = 0.1),    # 3
            conv_dw(8, 16, 1),    # 7
            conv_dw(16, 32, 2),   # 11
            conv_dw(32, 32, 1),   # 19
            conv_dw(32, 64, 2),   # 27
            conv_dw(64, 64, 1),   # 43
        )
        self.stage2 = nn.Sequential(
            conv_dw(64, 128, 2),   # 43 + 16 = 59
            conv_dw(128, 128, 1),  # 59 + 32 = 91
            conv_dw(128, 128, 1),  # 91 + 32 = 123
            conv_dw(128, 128, 1),  # 123 + 32 = 155
            conv_dw(128, 128, 1),  # 155 + 32 = 187
            conv_dw(128, 128, 1),  # 187 + 32 = 219
        )
        self.stage3 = nn.Sequential(
            conv_dw(128, 256, 2),  # 219 +3 2 = 241
            conv_dw(256, 256, 1),  # 241 + 64 = 301
        )
        self.avg = nn.AdaptiveAvgPool2d((1,1))
        self.fc = nn.Linear(256, 1000)

    def forward(self, x):
        x = self.stage1(x)
        x = self.stage2(x)
        x = self.stage3(x)
        x = self.avg(x)
        # x = self.model(x)
        x = x.view(-1, 256)
        x = self.fc(x)
        return x
third_party/GPEN/face_detect/facemodels/retinaface.py
ADDED
@@ -0,0 +1,127 @@
import torch
import torch.nn as nn
import torchvision.models.detection.backbone_utils as backbone_utils
import torchvision.models._utils as _utils
import torch.nn.functional as F
from collections import OrderedDict

from facemodels.net import MobileNetV1 as MobileNetV1
from facemodels.net import FPN as FPN
from facemodels.net import SSH as SSH



class ClassHead(nn.Module):
    def __init__(self,inchannels=512,num_anchors=3):
        super(ClassHead,self).__init__()
        self.num_anchors = num_anchors
        self.conv1x1 = nn.Conv2d(inchannels,self.num_anchors*2,kernel_size=(1,1),stride=1,padding=0)

    def forward(self,x):
        out = self.conv1x1(x)
        out = out.permute(0,2,3,1).contiguous()

        return out.view(out.shape[0], -1, 2)

class BboxHead(nn.Module):
    def __init__(self,inchannels=512,num_anchors=3):
        super(BboxHead,self).__init__()
        self.conv1x1 = nn.Conv2d(inchannels,num_anchors*4,kernel_size=(1,1),stride=1,padding=0)

    def forward(self,x):
        out = self.conv1x1(x)
        out = out.permute(0,2,3,1).contiguous()

        return out.view(out.shape[0], -1, 4)

class LandmarkHead(nn.Module):
    def __init__(self,inchannels=512,num_anchors=3):
        super(LandmarkHead,self).__init__()
        self.conv1x1 = nn.Conv2d(inchannels,num_anchors*10,kernel_size=(1,1),stride=1,padding=0)

    def forward(self,x):
        out = self.conv1x1(x)
        out = out.permute(0,2,3,1).contiguous()

        return out.view(out.shape[0], -1, 10)

class RetinaFace(nn.Module):
    def __init__(self, cfg = None, phase = 'train'):
        """
        :param cfg:  Network related settings.
        :param phase: train or test.
        """
        super(RetinaFace,self).__init__()
        self.phase = phase
        backbone = None
        if cfg['name'] == 'mobilenet0.25':
            backbone = MobileNetV1()
            if cfg['pretrain']:
                checkpoint = torch.load("./weights/mobilenetV1X0.25_pretrain.tar", map_location=torch.device('cpu'))
                from collections import OrderedDict
                new_state_dict = OrderedDict()
                for k, v in checkpoint['state_dict'].items():
                    name = k[7:]  # remove module.
                    new_state_dict[name] = v
                # load params
                backbone.load_state_dict(new_state_dict)
        elif cfg['name'] == 'Resnet50':
            import torchvision.models as models
            backbone = models.resnet50(pretrained=cfg['pretrain'])

        self.body = _utils.IntermediateLayerGetter(backbone, cfg['return_layers'])
        in_channels_stage2 = cfg['in_channel']
        in_channels_list = [
            in_channels_stage2 * 2,
            in_channels_stage2 * 4,
            in_channels_stage2 * 8,
        ]
        out_channels = cfg['out_channel']
        self.fpn = FPN(in_channels_list,out_channels)
        self.ssh1 = SSH(out_channels, out_channels)
        self.ssh2 = SSH(out_channels, out_channels)
        self.ssh3 = SSH(out_channels, out_channels)

        self.ClassHead = self._make_class_head(fpn_num=3, inchannels=cfg['out_channel'])
        self.BboxHead = self._make_bbox_head(fpn_num=3, inchannels=cfg['out_channel'])
        self.LandmarkHead = self._make_landmark_head(fpn_num=3, inchannels=cfg['out_channel'])

    def _make_class_head(self,fpn_num=3,inchannels=64,anchor_num=2):
        classhead = nn.ModuleList()
        for i in range(fpn_num):
            classhead.append(ClassHead(inchannels,anchor_num))
        return classhead

    def _make_bbox_head(self,fpn_num=3,inchannels=64,anchor_num=2):
        bboxhead = nn.ModuleList()
        for i in range(fpn_num):
            bboxhead.append(BboxHead(inchannels,anchor_num))
        return bboxhead

    def _make_landmark_head(self,fpn_num=3,inchannels=64,anchor_num=2):
        landmarkhead = nn.ModuleList()
        for i in range(fpn_num):
            landmarkhead.append(LandmarkHead(inchannels,anchor_num))
        return landmarkhead

    def forward(self,inputs):
        out = self.body(inputs)

        # FPN
        fpn = self.fpn(out)

        # SSH
        feature1 = self.ssh1(fpn[0])
        feature2 = self.ssh2(fpn[1])
        feature3 = self.ssh3(fpn[2])
        features = [feature1, feature2, feature3]

        bbox_regressions = torch.cat([self.BboxHead[i](feature) for i, feature in enumerate(features)], dim=1)
        classifications = torch.cat([self.ClassHead[i](feature) for i, feature in enumerate(features)],dim=1)
        ldm_regressions = torch.cat([self.LandmarkHead[i](feature) for i, feature in enumerate(features)], dim=1)

        if self.phase == 'train':
            output = (bbox_regressions, classifications, ldm_regressions)
        else:
            output = (bbox_regressions, F.softmax(classifications, dim=-1), ldm_regressions)
        return output
third_party/GPEN/face_detect/layers/__init__.py
ADDED
@@ -0,0 +1,2 @@
from .functions import *
from .modules import *
third_party/GPEN/face_detect/layers/functions/prior_box.py
ADDED
@@ -0,0 +1,34 @@
import torch
from itertools import product as product
import numpy as np
from math import ceil


class PriorBox(object):
    def __init__(self, cfg, image_size=None, phase='train'):
        super(PriorBox, self).__init__()
        self.min_sizes = cfg['min_sizes']
        self.steps = cfg['steps']
        self.clip = cfg['clip']
        self.image_size = image_size
        self.feature_maps = [[ceil(self.image_size[0]/step), ceil(self.image_size[1]/step)] for step in self.steps]
        self.name = "s"

    def forward(self):
        anchors = []
        for k, f in enumerate(self.feature_maps):
            min_sizes = self.min_sizes[k]
            for i, j in product(range(f[0]), range(f[1])):
                for min_size in min_sizes:
                    s_kx = min_size / self.image_size[1]
                    s_ky = min_size / self.image_size[0]
                    dense_cx = [x * self.steps[k] / self.image_size[1] for x in [j + 0.5]]
                    dense_cy = [y * self.steps[k] / self.image_size[0] for y in [i + 0.5]]
                    for cy, cx in product(dense_cy, dense_cx):
                        anchors += [cx, cy, s_kx, s_ky]

        # back to torch land
        output = torch.Tensor(anchors).view(-1, 4)
        if self.clip:
            output.clamp_(max=1, min=0)
        return output
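PriorBox places one anchor per entry of min_sizes[k] at every cell of the stride-steps[k] feature map, so the anchor count follows directly from the image size and the config. A small self-contained check for the Resnet50 config's 840x840 training size (a sketch for illustration, not part of the committed files):

# Minimal sketch: anchor count implied by PriorBox.forward() for cfg_re50.
from math import ceil

image_size = (840, 840)                        # cfg_re50['image_size']
steps = [8, 16, 32]                            # cfg_re50['steps']
min_sizes = [[16, 32], [64, 128], [256, 512]]  # cfg_re50['min_sizes']

total = 0
for k, step in enumerate(steps):
    fh, fw = ceil(image_size[0] / step), ceil(image_size[1] / step)
    total += fh * fw * len(min_sizes[k])       # anchors per cell = len(min_sizes[k])
print(total)                                    # 29126 anchors for an 840x840 input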
third_party/GPEN/face_detect/layers/modules/__init__.py
ADDED
@@ -0,0 +1,3 @@
from .multibox_loss import MultiBoxLoss

__all__ = ['MultiBoxLoss']
third_party/GPEN/face_detect/layers/modules/multibox_loss.py
ADDED
@@ -0,0 +1,125 @@
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd import Variable
from utils.box_utils import match, log_sum_exp
from data import cfg_mnet
GPU = cfg_mnet['gpu_train']

class MultiBoxLoss(nn.Module):
    """SSD Weighted Loss Function
    Compute Targets:
        1) Produce Confidence Target Indices by matching ground truth boxes
           with (default) 'priorboxes' that have jaccard index > threshold parameter
           (default threshold: 0.5).
        2) Produce localization target by 'encoding' variance into offsets of ground
           truth boxes and their matched 'priorboxes'.
        3) Hard negative mining to filter the excessive number of negative examples
           that comes with using a large number of default bounding boxes.
           (default negative:positive ratio 3:1)
    Objective Loss:
        L(x,c,l,g) = (Lconf(x, c) + αLloc(x,l,g)) / N
        Where, Lconf is the CrossEntropy Loss and Lloc is the SmoothL1 Loss
        weighted by α which is set to 1 by cross val.
        Args:
            c: class confidences,
            l: predicted boxes,
            g: ground truth boxes
            N: number of matched default boxes
        See: https://arxiv.org/pdf/1512.02325.pdf for more details.
    """

    def __init__(self, num_classes, overlap_thresh, prior_for_matching, bkg_label, neg_mining, neg_pos, neg_overlap, encode_target):
        super(MultiBoxLoss, self).__init__()
        self.num_classes = num_classes
        self.threshold = overlap_thresh
        self.background_label = bkg_label
        self.encode_target = encode_target
        self.use_prior_for_matching = prior_for_matching
        self.do_neg_mining = neg_mining
        self.negpos_ratio = neg_pos
        self.neg_overlap = neg_overlap
        self.variance = [0.1, 0.2]

    def forward(self, predictions, priors, targets):
        """Multibox Loss
        Args:
            predictions (tuple): A tuple containing loc preds, conf preds,
            and prior boxes from SSD net.
                conf shape: torch.size(batch_size,num_priors,num_classes)
                loc shape: torch.size(batch_size,num_priors,4)
                priors shape: torch.size(num_priors,4)

            ground_truth (tensor): Ground truth boxes and labels for a batch,
                shape: [batch_size,num_objs,5] (last idx is the label).
        """

        loc_data, conf_data, landm_data = predictions
        priors = priors
        num = loc_data.size(0)
        num_priors = (priors.size(0))

        # match priors (default boxes) and ground truth boxes
        loc_t = torch.Tensor(num, num_priors, 4)
        landm_t = torch.Tensor(num, num_priors, 10)
        conf_t = torch.LongTensor(num, num_priors)
        for idx in range(num):
            truths = targets[idx][:, :4].data
            labels = targets[idx][:, -1].data
            landms = targets[idx][:, 4:14].data
            defaults = priors.data
            match(self.threshold, truths, defaults, self.variance, labels, landms, loc_t, conf_t, landm_t, idx)
        if GPU:
            loc_t = loc_t.cuda()
            conf_t = conf_t.cuda()
            landm_t = landm_t.cuda()

        zeros = torch.tensor(0).cuda()
        # landm Loss (Smooth L1)
        # Shape: [batch,num_priors,10]
        pos1 = conf_t > zeros
        num_pos_landm = pos1.long().sum(1, keepdim=True)
        N1 = max(num_pos_landm.data.sum().float(), 1)
        pos_idx1 = pos1.unsqueeze(pos1.dim()).expand_as(landm_data)
        landm_p = landm_data[pos_idx1].view(-1, 10)
        landm_t = landm_t[pos_idx1].view(-1, 10)
        loss_landm = F.smooth_l1_loss(landm_p, landm_t, reduction='sum')


        pos = conf_t != zeros
        conf_t[pos] = 1

        # Localization Loss (Smooth L1)
        # Shape: [batch,num_priors,4]
        pos_idx = pos.unsqueeze(pos.dim()).expand_as(loc_data)
        loc_p = loc_data[pos_idx].view(-1, 4)
        loc_t = loc_t[pos_idx].view(-1, 4)
        loss_l = F.smooth_l1_loss(loc_p, loc_t, reduction='sum')

        # Compute max conf across batch for hard negative mining
        batch_conf = conf_data.view(-1, self.num_classes)
        loss_c = log_sum_exp(batch_conf) - batch_conf.gather(1, conf_t.view(-1, 1))

        # Hard Negative Mining
        loss_c[pos.view(-1, 1)] = 0  # filter out pos boxes for now
        loss_c = loss_c.view(num, -1)
        _, loss_idx = loss_c.sort(1, descending=True)
        _, idx_rank = loss_idx.sort(1)
        num_pos = pos.long().sum(1, keepdim=True)
        num_neg = torch.clamp(self.negpos_ratio*num_pos, max=pos.size(1)-1)
        neg = idx_rank < num_neg.expand_as(idx_rank)

        # Confidence Loss Including Positive and Negative Examples
        pos_idx = pos.unsqueeze(2).expand_as(conf_data)
        neg_idx = neg.unsqueeze(2).expand_as(conf_data)
        conf_p = conf_data[(pos_idx+neg_idx).gt(0)].view(-1,self.num_classes)
        targets_weighted = conf_t[(pos+neg).gt(0)]
        loss_c = F.cross_entropy(conf_p, targets_weighted, reduction='sum')

        # Sum of losses: L(x,c,l,g) = (Lconf(x, c) + αLloc(x,l,g)) / N
        N = max(num_pos.data.sum().float(), 1)
        loss_l /= N
        loss_c /= N
        loss_landm /= N1

        return loss_l, loss_c, loss_landm
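A minimal instantiation sketch, assuming the face_detect directory is on sys.path; the argument values are illustrative only and are not taken from this repository's training code:

# Minimal sketch, not part of the committed files; values are illustrative.
from layers.modules import MultiBoxLoss

criterion = MultiBoxLoss(num_classes=2,            # face vs. background
                         overlap_thresh=0.35,      # jaccard threshold used by match()
                         prior_for_matching=True, bkg_label=0,
                         neg_mining=True, neg_pos=7,   # hard-negative : positive ratio
                         neg_overlap=0.35, encode_target=False)
# during training (a GPU is assumed, since the loss moves its targets to CUDA):
# loss_l, loss_c, loss_landm = criterion((loc, conf, landms), priors, targets)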
third_party/GPEN/face_detect/retinaface_detection.py
ADDED
@@ -0,0 +1,193 @@
'''
@paper: GAN Prior Embedded Network for Blind Face Restoration in the Wild (CVPR2021)
@author: yangxy (yangtao9009@gmail.com)
'''
import os
import torch
import torch.backends.cudnn as cudnn
import numpy as np
import cv2
import time
import torch.nn.functional as F

from data import cfg_re50
from layers.functions.prior_box import PriorBox
from utils.nms.py_cpu_nms import py_cpu_nms
from facemodels.retinaface import RetinaFace
from utils.box_utils import decode, decode_landm


class RetinaFaceDetection(object):
    def __init__(self, base_dir, device='cuda', network='RetinaFace-R50'):
        torch.set_grad_enabled(False)
        cudnn.benchmark = True
        self.pretrained_path = os.path.join(base_dir, 'weights', network+'.pth')
        self.device = device  #torch.cuda.current_device()
        self.cfg = cfg_re50
        self.net = RetinaFace(cfg=self.cfg, phase='test')
        self.load_model()
        self.net = self.net.to(device)

        self.mean = torch.tensor([[[[104]], [[117]], [[123]]]]).to(device)

    def check_keys(self, pretrained_state_dict):
        ckpt_keys = set(pretrained_state_dict.keys())
        model_keys = set(self.net.state_dict().keys())
        used_pretrained_keys = model_keys & ckpt_keys
        unused_pretrained_keys = ckpt_keys - model_keys
        missing_keys = model_keys - ckpt_keys
        assert len(used_pretrained_keys) > 0, 'load NONE from pretrained checkpoint'
        return True

    def remove_prefix(self, state_dict, prefix):
        ''' Old style model is stored with all names of parameters sharing common prefix 'module.' '''
        f = lambda x: x.split(prefix, 1)[-1] if x.startswith(prefix) else x
        return {f(key): value for key, value in state_dict.items()}

    def load_model(self, load_to_cpu=False):
        #if load_to_cpu:
        #    pretrained_dict = torch.load(self.pretrained_path, map_location=lambda storage, loc: storage)
        #else:
        #    pretrained_dict = torch.load(self.pretrained_path, map_location=lambda storage, loc: storage.cuda())
        pretrained_dict = torch.load(self.pretrained_path, map_location=torch.device('cpu'))
        if "state_dict" in pretrained_dict.keys():
            pretrained_dict = self.remove_prefix(pretrained_dict['state_dict'], 'module.')
        else:
            pretrained_dict = self.remove_prefix(pretrained_dict, 'module.')
        self.check_keys(pretrained_dict)
        self.net.load_state_dict(pretrained_dict, strict=False)
        self.net.eval()

    def detect(self, img_raw, resize=1, confidence_threshold=0.9, nms_threshold=0.4, top_k=5000, keep_top_k=750, save_image=False):
        img = np.float32(img_raw)

        im_height, im_width = img.shape[:2]
        ss = 1.0
        # tricky
        if max(im_height, im_width) > 1500:
            ss = 1000.0/max(im_height, im_width)
            img = cv2.resize(img, (0,0), fx=ss, fy=ss)
            im_height, im_width = img.shape[:2]

        scale = torch.Tensor([img.shape[1], img.shape[0], img.shape[1], img.shape[0]])
        img -= (104, 117, 123)
        img = img.transpose(2, 0, 1)
        img = torch.from_numpy(img).unsqueeze(0)
        img = img.to(self.device)
        scale = scale.to(self.device)

        loc, conf, landms = self.net(img)  # forward pass

        priorbox = PriorBox(self.cfg, image_size=(im_height, im_width))
        priors = priorbox.forward()
        priors = priors.to(self.device)
        prior_data = priors.data
        boxes = decode(loc.data.squeeze(0), prior_data, self.cfg['variance'])
        boxes = boxes * scale / resize
        boxes = boxes.cpu().numpy()
        scores = conf.squeeze(0).data.cpu().numpy()[:, 1]
        landms = decode_landm(landms.data.squeeze(0), prior_data, self.cfg['variance'])
        scale1 = torch.Tensor([img.shape[3], img.shape[2], img.shape[3], img.shape[2],
                               img.shape[3], img.shape[2], img.shape[3], img.shape[2],
                               img.shape[3], img.shape[2]])
        scale1 = scale1.to(self.device)
        landms = landms * scale1 / resize
        landms = landms.cpu().numpy()

        # ignore low scores
        inds = np.where(scores > confidence_threshold)[0]
        boxes = boxes[inds]
        landms = landms[inds]
        scores = scores[inds]

        # keep top-K before NMS
        order = scores.argsort()[::-1][:top_k]
        boxes = boxes[order]
        landms = landms[order]
        scores = scores[order]

        # do NMS
        dets = np.hstack((boxes, scores[:, np.newaxis])).astype(np.float32, copy=False)
        keep = py_cpu_nms(dets, nms_threshold)
        # keep = nms(dets, nms_threshold,force_cpu=args.cpu)
        dets = dets[keep, :]
        landms = landms[keep]

        # keep top-K faster NMS
        dets = dets[:keep_top_k, :]
        landms = landms[:keep_top_k, :]

        # sort faces(delete)
        '''
        fscores = [det[4] for det in dets]
        sorted_idx = sorted(range(len(fscores)), key=lambda k:fscores[k], reverse=False)  # sort index
        tmp = [landms[idx] for idx in sorted_idx]
        landms = np.asarray(tmp)
        '''

        landms = landms.reshape((-1, 5, 2))
        landms = landms.transpose((0, 2, 1))
        landms = landms.reshape(-1, 10, )
        return dets/ss, landms/ss

    def detect_tensor(self, img, resize=1, confidence_threshold=0.9, nms_threshold=0.4, top_k=5000, keep_top_k=750, save_image=False):
        im_height, im_width = img.shape[-2:]
        ss = 1000/max(im_height, im_width)
        img = F.interpolate(img, scale_factor=ss)
        im_height, im_width = img.shape[-2:]
        scale = torch.Tensor([im_width, im_height, im_width, im_height]).to(self.device)
        img -= self.mean

        loc, conf, landms = self.net(img)  # forward pass

        priorbox = PriorBox(self.cfg, image_size=(im_height, im_width))
        priors = priorbox.forward()
        priors = priors.to(self.device)
        prior_data = priors.data
        boxes = decode(loc.data.squeeze(0), prior_data, self.cfg['variance'])
        boxes = boxes * scale / resize
        boxes = boxes.cpu().numpy()
        scores = conf.squeeze(0).data.cpu().numpy()[:, 1]
        landms = decode_landm(landms.data.squeeze(0), prior_data, self.cfg['variance'])
        scale1 = torch.Tensor([img.shape[3], img.shape[2], img.shape[3], img.shape[2],
                               img.shape[3], img.shape[2], img.shape[3], img.shape[2],
                               img.shape[3], img.shape[2]])
        scale1 = scale1.to(self.device)
        landms = landms * scale1 / resize
        landms = landms.cpu().numpy()

        # ignore low scores
        inds = np.where(scores > confidence_threshold)[0]
        boxes = boxes[inds]
        landms = landms[inds]
        scores = scores[inds]

        # keep top-K before NMS
        order = scores.argsort()[::-1][:top_k]
        boxes = boxes[order]
        landms = landms[order]
        scores = scores[order]

        # do NMS
        dets = np.hstack((boxes, scores[:, np.newaxis])).astype(np.float32, copy=False)
        keep = py_cpu_nms(dets, nms_threshold)
        # keep = nms(dets, nms_threshold,force_cpu=args.cpu)
        dets = dets[keep, :]
        landms = landms[keep]

        # keep top-K faster NMS
        dets = dets[:keep_top_k, :]
        landms = landms[:keep_top_k, :]

        # sort faces(delete)
        '''
        fscores = [det[4] for det in dets]
        sorted_idx = sorted(range(len(fscores)), key=lambda k:fscores[k], reverse=False)  # sort index
        tmp = [landms[idx] for idx in sorted_idx]
        landms = np.asarray(tmp)
        '''

        landms = landms.reshape((-1, 5, 2))
        landms = landms.transpose((0, 2, 1))
        landms = landms.reshape(-1, 10, )
        return dets/ss, landms/ss
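detect() returns dets of shape [N, 5] (x1, y1, x2, y2, score) and landms of shape [N, 10], with the five landmarks flattened as all x coordinates followed by all y coordinates (see the reshape/transpose at the end of the method). A minimal usage sketch, assuming the face_detect directory is on sys.path and that <base_dir>/weights/RetinaFace-R50.pth exists; the image path is a placeholder:

# Minimal usage sketch, not part of the committed files; paths are placeholders.
import cv2
from retinaface_detection import RetinaFaceDetection

detector = RetinaFaceDetection(base_dir='./', device='cuda')
img_bgr = cv2.imread('examples/test.jpg')   # BGR uint8 image, as detect() expects
dets, landms = detector.detect(img_bgr, confidence_threshold=0.9)
for (x1, y1, x2, y2, score), lm in zip(dets, landms):
    print(score, (x1, y1, x2, y2))           # lm holds the 5 points as [x1..x5, y1..y5]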
third_party/GPEN/face_detect/utils/__init__.py
ADDED
File without changes
third_party/GPEN/face_detect/utils/box_utils.py
ADDED
@@ -0,0 +1,330 @@
import torch
import numpy as np


def point_form(boxes):
    """ Convert prior_boxes to (xmin, ymin, xmax, ymax)
    representation for comparison to point form ground truth data.
    Args:
        boxes: (tensor) center-size default boxes from priorbox layers.
    Return:
        boxes: (tensor) Converted xmin, ymin, xmax, ymax form of boxes.
    """
    return torch.cat((boxes[:, :2] - boxes[:, 2:]/2,     # xmin, ymin
                     boxes[:, :2] + boxes[:, 2:]/2), 1)  # xmax, ymax


def center_size(boxes):
    """ Convert prior_boxes to (cx, cy, w, h)
    representation for comparison to center-size form ground truth data.
    Args:
        boxes: (tensor) point_form boxes
    Return:
        boxes: (tensor) Converted xmin, ymin, xmax, ymax form of boxes.
    """
    return torch.cat((boxes[:, 2:] + boxes[:, :2])/2,  # cx, cy
                      boxes[:, 2:] - boxes[:, :2], 1)  # w, h


def intersect(box_a, box_b):
    """ We resize both tensors to [A,B,2] without new malloc:
    [A,2] -> [A,1,2] -> [A,B,2]
    [B,2] -> [1,B,2] -> [A,B,2]
    Then we compute the area of intersect between box_a and box_b.
    Args:
      box_a: (tensor) bounding boxes, Shape: [A,4].
      box_b: (tensor) bounding boxes, Shape: [B,4].
    Return:
      (tensor) intersection area, Shape: [A,B].
    """
    A = box_a.size(0)
    B = box_b.size(0)
    max_xy = torch.min(box_a[:, 2:].unsqueeze(1).expand(A, B, 2),
                       box_b[:, 2:].unsqueeze(0).expand(A, B, 2))
    min_xy = torch.max(box_a[:, :2].unsqueeze(1).expand(A, B, 2),
                       box_b[:, :2].unsqueeze(0).expand(A, B, 2))
    inter = torch.clamp((max_xy - min_xy), min=0)
    return inter[:, :, 0] * inter[:, :, 1]


def jaccard(box_a, box_b):
    """Compute the jaccard overlap of two sets of boxes. The jaccard overlap
    is simply the intersection over union of two boxes. Here we operate on
    ground truth boxes and default boxes.
    E.g.:
        A ∩ B / A ∪ B = A ∩ B / (area(A) + area(B) - A ∩ B)
    Args:
        box_a: (tensor) Ground truth bounding boxes, Shape: [num_objects,4]
        box_b: (tensor) Prior boxes from priorbox layers, Shape: [num_priors,4]
    Return:
        jaccard overlap: (tensor) Shape: [box_a.size(0), box_b.size(0)]
    """
    inter = intersect(box_a, box_b)
    area_a = ((box_a[:, 2]-box_a[:, 0]) *
              (box_a[:, 3]-box_a[:, 1])).unsqueeze(1).expand_as(inter)  # [A,B]
    area_b = ((box_b[:, 2]-box_b[:, 0]) *
              (box_b[:, 3]-box_b[:, 1])).unsqueeze(0).expand_as(inter)  # [A,B]
    union = area_a + area_b - inter
    return inter / union  # [A,B]


def matrix_iou(a, b):
    """
    return iou of a and b, numpy version for data augmentation
    """
    lt = np.maximum(a[:, np.newaxis, :2], b[:, :2])
    rb = np.minimum(a[:, np.newaxis, 2:], b[:, 2:])

    area_i = np.prod(rb - lt, axis=2) * (lt < rb).all(axis=2)
    area_a = np.prod(a[:, 2:] - a[:, :2], axis=1)
    area_b = np.prod(b[:, 2:] - b[:, :2], axis=1)
    return area_i / (area_a[:, np.newaxis] + area_b - area_i)


def matrix_iof(a, b):
    """
    return iof of a and b, numpy version for data augmentation
    """
    lt = np.maximum(a[:, np.newaxis, :2], b[:, :2])
    rb = np.minimum(a[:, np.newaxis, 2:], b[:, 2:])

    area_i = np.prod(rb - lt, axis=2) * (lt < rb).all(axis=2)
    area_a = np.prod(a[:, 2:] - a[:, :2], axis=1)
    return area_i / np.maximum(area_a[:, np.newaxis], 1)


def match(threshold, truths, priors, variances, labels, landms, loc_t, conf_t, landm_t, idx):
    """Match each prior box with the ground truth box of the highest jaccard
    overlap, encode the bounding boxes, then return the matched indices
    corresponding to both confidence and location preds.
    Args:
        threshold: (float) The overlap threshold used when matching boxes.
        truths: (tensor) Ground truth boxes, Shape: [num_obj, 4].
        priors: (tensor) Prior boxes from priorbox layers, Shape: [n_priors,4].
        variances: (tensor) Variances corresponding to each prior coord,
            Shape: [num_priors, 4].
        labels: (tensor) All the class labels for the image, Shape: [num_obj].
        landms: (tensor) Ground truth landms, Shape [num_obj, 10].
        loc_t: (tensor) Tensor to be filled w/ encoded location targets.
        conf_t: (tensor) Tensor to be filled w/ matched indices for conf preds.
        landm_t: (tensor) Tensor to be filled w/ encoded landm targets.
        idx: (int) current batch index
    Return:
        The matched indices corresponding to 1)location 2)confidence 3)landm preds.
    """
    # jaccard index
    overlaps = jaccard(
        truths,
        point_form(priors)
    )
    # (Bipartite Matching)
    # [1,num_objects] best prior for each ground truth
    best_prior_overlap, best_prior_idx = overlaps.max(1, keepdim=True)

    # ignore hard gt
    valid_gt_idx = best_prior_overlap[:, 0] >= 0.2
    best_prior_idx_filter = best_prior_idx[valid_gt_idx, :]
    if best_prior_idx_filter.shape[0] <= 0:
        loc_t[idx] = 0
        conf_t[idx] = 0
        return

    # [1,num_priors] best ground truth for each prior
    best_truth_overlap, best_truth_idx = overlaps.max(0, keepdim=True)
    best_truth_idx.squeeze_(0)
    best_truth_overlap.squeeze_(0)
    best_prior_idx.squeeze_(1)
    best_prior_idx_filter.squeeze_(1)
    best_prior_overlap.squeeze_(1)
    best_truth_overlap.index_fill_(0, best_prior_idx_filter, 2)  # ensure best prior
    # TODO refactor: index best_prior_idx with long tensor
    # ensure every gt matches with its prior of max overlap
    for j in range(best_prior_idx.size(0)):   # decide which ground-truth box this anchor predicts
        best_truth_idx[best_prior_idx[j]] = j
    matches = truths[best_truth_idx]          # Shape: [num_priors,4] matched bbox for each anchor
    conf = labels[best_truth_idx]             # Shape: [num_priors] matched label for each anchor
    conf[best_truth_overlap < threshold] = 0  # label as background: overlap below threshold counts as a negative sample
    loc = encode(matches, priors, variances)

    matches_landm = landms[best_truth_idx]
    landm = encode_landm(matches_landm, priors, variances)
    loc_t[idx] = loc     # [num_priors,4] encoded offsets to learn
    conf_t[idx] = conf   # [num_priors] top class label for each prior
    landm_t[idx] = landm


def encode(matched, priors, variances):
    """Encode the variances from the priorbox layers into the ground truth boxes
    we have matched (based on jaccard overlap) with the prior boxes.
    Args:
        matched: (tensor) Coords of ground truth for each prior in point-form
            Shape: [num_priors, 4].
        priors: (tensor) Prior boxes in center-offset form
            Shape: [num_priors,4].
        variances: (list[float]) Variances of priorboxes
    Return:
        encoded boxes (tensor), Shape: [num_priors, 4]
    """

    # dist b/t match center and prior's center
    g_cxcy = (matched[:, :2] + matched[:, 2:])/2 - priors[:, :2]
    # encode variance
    g_cxcy /= (variances[0] * priors[:, 2:])
    # match wh / prior wh
    g_wh = (matched[:, 2:] - matched[:, :2]) / priors[:, 2:]
    g_wh = torch.log(g_wh) / variances[1]
    # return target for smooth_l1_loss
    return torch.cat([g_cxcy, g_wh], 1)  # [num_priors,4]

def encode_landm(matched, priors, variances):
    """Encode the variances from the priorbox layers into the ground truth boxes
    we have matched (based on jaccard overlap) with the prior boxes.
    Args:
        matched: (tensor) Coords of ground truth for each prior in point-form
            Shape: [num_priors, 10].
        priors: (tensor) Prior boxes in center-offset form
            Shape: [num_priors,4].
        variances: (list[float]) Variances of priorboxes
    Return:
        encoded landm (tensor), Shape: [num_priors, 10]
    """

    # dist b/t match center and prior's center
    matched = torch.reshape(matched, (matched.size(0), 5, 2))
    priors_cx = priors[:, 0].unsqueeze(1).expand(matched.size(0), 5).unsqueeze(2)
    priors_cy = priors[:, 1].unsqueeze(1).expand(matched.size(0), 5).unsqueeze(2)
    priors_w = priors[:, 2].unsqueeze(1).expand(matched.size(0), 5).unsqueeze(2)
    priors_h = priors[:, 3].unsqueeze(1).expand(matched.size(0), 5).unsqueeze(2)
    priors = torch.cat([priors_cx, priors_cy, priors_w, priors_h], dim=2)
    g_cxcy = matched[:, :, :2] - priors[:, :, :2]
    # encode variance
    g_cxcy /= (variances[0] * priors[:, :, 2:])
    # g_cxcy /= priors[:, :, 2:]
    g_cxcy = g_cxcy.reshape(g_cxcy.size(0), -1)
    # return target for smooth_l1_loss
    return g_cxcy


# Adapted from https://github.com/Hakuyume/chainer-ssd
def decode(loc, priors, variances):
    """Decode locations from predictions using priors to undo
    the encoding we did for offset regression at train time.
    Args:
        loc (tensor): location predictions for loc layers,
            Shape: [num_priors,4]
        priors (tensor): Prior boxes in center-offset form.
            Shape: [num_priors,4].
        variances: (list[float]) Variances of priorboxes
    Return:
        decoded bounding box predictions
    """

    boxes = torch.cat((
        priors[:, :2] + loc[:, :2] * variances[0] * priors[:, 2:],
        priors[:, 2:] * torch.exp(loc[:, 2:] * variances[1])), 1)
    boxes[:, :2] -= boxes[:, 2:] / 2
    boxes[:, 2:] += boxes[:, :2]
    return boxes

def decode_landm(pre, priors, variances):
    """Decode landm from predictions using priors to undo
    the encoding we did for offset regression at train time.
    Args:
        pre (tensor): landm predictions for loc layers,
            Shape: [num_priors,10]
        priors (tensor): Prior boxes in center-offset form.
            Shape: [num_priors,4].
        variances: (list[float]) Variances of priorboxes
    Return:
        decoded landm predictions
    """
    landms = torch.cat((priors[:, :2] + pre[:, :2] * variances[0] * priors[:, 2:],
                        priors[:, :2] + pre[:, 2:4] * variances[0] * priors[:, 2:],
                        priors[:, :2] + pre[:, 4:6] * variances[0] * priors[:, 2:],
                        priors[:, :2] + pre[:, 6:8] * variances[0] * priors[:, 2:],
                        priors[:, :2] + pre[:, 8:10] * variances[0] * priors[:, 2:],
                        ), dim=1)
    return landms


def log_sum_exp(x):
    """Utility function for computing log_sum_exp while determining
    This will be used to determine unaveraged confidence loss across
    all examples in a batch.
    Args:
        x (Variable(tensor)): conf_preds from conf layers
    """
    x_max = x.data.max()
    return torch.log(torch.sum(torch.exp(x-x_max), 1, keepdim=True)) + x_max


# Original author: Francisco Massa:
# https://github.com/fmassa/object-detection.torch
# Ported to PyTorch by Max deGroot (02/01/2017)
def nms(boxes, scores, overlap=0.5, top_k=200):
    """Apply non-maximum suppression at test time to avoid detecting too many
    overlapping bounding boxes for a given object.
    Args:
        boxes: (tensor) The location preds for the img, Shape: [num_priors,4].
        scores: (tensor) The class pred scores for the img, Shape:[num_priors].
        overlap: (float) The overlap thresh for suppressing unnecessary boxes.
        top_k: (int) The Maximum number of box preds to consider.
    Return:
        The indices of the kept boxes with respect to num_priors.
    """

    keep = torch.Tensor(scores.size(0)).fill_(0).long()
    if boxes.numel() == 0:
        return keep
    x1 = boxes[:, 0]
    y1 = boxes[:, 1]
    x2 = boxes[:, 2]
    y2 = boxes[:, 3]
    area = torch.mul(x2 - x1, y2 - y1)
    v, idx = scores.sort(0)  # sort in ascending order
    # I = I[v >= 0.01]
    idx = idx[-top_k:]  # indices of the top-k largest vals
    xx1 = boxes.new()
    yy1 = boxes.new()
    xx2 = boxes.new()
    yy2 = boxes.new()
    w = boxes.new()
    h = boxes.new()

    # keep = torch.Tensor()
    count = 0
    while idx.numel() > 0:
        i = idx[-1]  # index of current largest val
        # keep.append(i)
        keep[count] = i
        count += 1
        if idx.size(0) == 1:
            break
        idx = idx[:-1]  # remove kept element from view
        # load bboxes of next highest vals
        torch.index_select(x1, 0, idx, out=xx1)
        torch.index_select(y1, 0, idx, out=yy1)
        torch.index_select(x2, 0, idx, out=xx2)
        torch.index_select(y2, 0, idx, out=yy2)
        # store element-wise max with next highest score
        xx1 = torch.clamp(xx1, min=x1[i])
        yy1 = torch.clamp(yy1, min=y1[i])
        xx2 = torch.clamp(xx2, max=x2[i])
        yy2 = torch.clamp(yy2, max=y2[i])
        w.resize_as_(xx2)
        h.resize_as_(yy2)
        w = xx2 - xx1
        h = yy2 - yy1
        # check sizes of xx1 and xx2.. after each iteration
        w = torch.clamp(w, min=0.0)
        h = torch.clamp(h, min=0.0)
        inter = w*h
        # IoU = i / (area(a) + area(b) - i)
        rem_areas = torch.index_select(area, 0, idx)  # load remaining areas)
        union = (rem_areas - inter) + area[i]
|
| 325 |
+
IoU = inter/union # store result in iou
|
| 326 |
+
# keep only elements with an IoU <= overlap
|
| 327 |
+
idx = idx[IoU.le(overlap)]
|
| 328 |
+
return keep, count
|
| 329 |
+
|
| 330 |
+
|
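For orientation, a minimal sketch (not part of this commit) of how decode() and nms() above are chained at test time when run alongside this module; the loc, scores and priors tensors are synthetic stand-ins for the outputs of a RetinaFace-style detection head, with the usual variances of [0.1, 0.2]:

import torch

num_priors = 100
loc = torch.randn(num_priors, 4) * 0.05            # regression offsets from the head
scores = torch.rand(num_priors)                     # face confidence per prior
priors = torch.rand(num_priors, 4).clamp(0.05, 0.95)  # (cx, cy, w, h) prior boxes
variances = [0.1, 0.2]

boxes = decode(loc, priors, variances)              # -> [num_priors, 4] in (x1, y1, x2, y2) form
keep, count = nms(boxes, scores, overlap=0.4, top_k=50)
final_boxes = boxes[keep[:count]]                   # boxes that survive suppression
print(final_boxes.shape)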
third_party/GPEN/face_detect/utils/nms/__init__.py
ADDED
File without changes
third_party/GPEN/face_detect/utils/nms/py_cpu_nms.py
ADDED
@@ -0,0 +1,38 @@
# --------------------------------------------------------
# Fast R-CNN
# Copyright (c) 2015 Microsoft
# Licensed under The MIT License [see LICENSE for details]
# Written by Ross Girshick
# --------------------------------------------------------

import numpy as np

def py_cpu_nms(dets, thresh):
    """Pure Python NMS baseline."""
    x1 = dets[:, 0]
    y1 = dets[:, 1]
    x2 = dets[:, 2]
    y2 = dets[:, 3]
    scores = dets[:, 4]

    areas = (x2 - x1 + 1) * (y2 - y1 + 1)
    order = scores.argsort()[::-1]

    keep = []
    while order.size > 0:
        i = order[0]
        keep.append(i)
        xx1 = np.maximum(x1[i], x1[order[1:]])
        yy1 = np.maximum(y1[i], y1[order[1:]])
        xx2 = np.minimum(x2[i], x2[order[1:]])
        yy2 = np.minimum(y2[i], y2[order[1:]])

        w = np.maximum(0.0, xx2 - xx1 + 1)
        h = np.maximum(0.0, yy2 - yy1 + 1)
        inter = w * h
        ovr = inter / (areas[i] + areas[order[1:]] - inter)

        inds = np.where(ovr <= thresh)[0]
        order = order[inds + 1]

    return keep
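A small self-contained check of py_cpu_nms (illustration only, not part of the commit); each row of dets is assumed to be [x1, y1, x2, y2, score]:

import numpy as np

dets = np.array([
    [10, 10, 50, 50, 0.9],       # highest score, kept
    [12, 12, 52, 52, 0.8],       # overlaps the first box heavily, suppressed
    [100, 100, 150, 150, 0.7],   # disjoint from the others, kept
], dtype=np.float32)

keep = py_cpu_nms(dets, thresh=0.5)
print(keep)  # indices of surviving boxes: 0 and 2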
third_party/GPEN/face_detect/utils/timer.py
ADDED
@@ -0,0 +1,40 @@
# --------------------------------------------------------
# Fast R-CNN
# Copyright (c) 2015 Microsoft
# Licensed under The MIT License [see LICENSE for details]
# Written by Ross Girshick
# --------------------------------------------------------

import time


class Timer(object):
    """A simple timer."""
    def __init__(self):
        self.total_time = 0.
        self.calls = 0
        self.start_time = 0.
        self.diff = 0.
        self.average_time = 0.

    def tic(self):
        # using time.time instead of time.clock because time time.clock
        # does not normalize for multithreading
        self.start_time = time.time()

    def toc(self, average=True):
        self.diff = time.time() - self.start_time
        self.total_time += self.diff
        self.calls += 1
        self.average_time = self.total_time / self.calls
        if average:
            return self.average_time
        else:
            return self.diff

    def clear(self):
        self.total_time = 0.
        self.calls = 0
        self.start_time = 0.
        self.diff = 0.
        self.average_time = 0.
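Typical use of Timer looks like the following sketch (illustration only, not part of the commit):

import time

t = Timer()
for _ in range(3):
    t.tic()
    time.sleep(0.01)   # stand-in for real work, e.g. one detection forward pass
    t.toc()            # accumulates total_time and calls
print('average seconds per call: %.4f' % t.average_time)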
third_party/GPEN/face_enhancement.py
ADDED
@@ -0,0 +1,161 @@
'''
@paper: GAN Prior Embedded Network for Blind Face Restoration in the Wild (CVPR2021)
@author: yangxy (yangtao9009@gmail.com)
'''
import os
import cv2
import glob
import time
import argparse
import numpy as np
from PIL import Image

import third_party.GPEN.__init_paths
from third_party.GPEN.face_detect.retinaface_detection import RetinaFaceDetection
from third_party.GPEN.face_parse.face_parsing import FaceParse
from third_party.GPEN.face_model.face_gan import FaceGAN
from third_party.GPEN.sr_model.real_esrnet import RealESRNet
from third_party.GPEN.align_faces import warp_and_crop_face, get_reference_facial_points

class FaceEnhancement(object):
    def __init__(self, base_dir='./', in_size=512, out_size=512, model=None, use_sr=True, sr_model=None, channel_multiplier=2, narrow=1, key=None, device='cuda'):
        self.facedetector = RetinaFaceDetection(base_dir, device)
        self.facegan = FaceGAN(base_dir, in_size, out_size, model, channel_multiplier, narrow, key, device=device)
        self.srmodel = RealESRNet(base_dir, sr_model, device=device)
        self.faceparser = FaceParse(base_dir, device=device)
        self.use_sr = use_sr
        self.in_size = in_size
        self.out_size = out_size
        self.threshold = 0.9

        # the mask for pasting restored faces back
        self.mask = np.zeros((512, 512), np.float32)
        cv2.rectangle(self.mask, (26, 26), (486, 486), (1, 1, 1), -1, cv2.LINE_AA)
        self.mask = cv2.GaussianBlur(self.mask, (101, 101), 11)
        self.mask = cv2.GaussianBlur(self.mask, (101, 101), 11)

        self.kernel = np.array((
            [0.0625, 0.125, 0.0625],
            [0.125, 0.25, 0.125],
            [0.0625, 0.125, 0.0625]), dtype="float32")

        # get the reference 5 landmarks position in the crop settings
        default_square = True
        inner_padding_factor = 0.25
        outer_padding = (0, 0)
        self.reference_5pts = get_reference_facial_points(
            (self.in_size, self.in_size), inner_padding_factor, outer_padding, default_square)

    def mask_postprocess(self, mask, thres=20):
        mask[:thres, :] = 0; mask[-thres:, :] = 0
        mask[:, :thres] = 0; mask[:, -thres:] = 0
        mask = cv2.GaussianBlur(mask, (101, 101), 11)
        mask = cv2.GaussianBlur(mask, (101, 101), 11)
        return mask.astype(np.float32)

    def process(self, img, aligned=False):
        orig_faces, enhanced_faces = [], []
        if aligned:
            ef = self.facegan.process(img)
            orig_faces.append(img)
            enhanced_faces.append(ef)

            if self.use_sr:
                ef = self.srmodel.process(ef)

            return ef, orig_faces, enhanced_faces

        if self.use_sr:
            img_sr = self.srmodel.process(img)
            if img_sr is not None:
                img = cv2.resize(img, img_sr.shape[:2][::-1])

        facebs, landms = self.facedetector.detect(img)

        height, width = img.shape[:2]
        full_mask = np.zeros((height, width), dtype=np.float32)
        full_img = np.zeros(img.shape, dtype=np.uint8)

        for i, (faceb, facial5points) in enumerate(zip(facebs, landms)):
            if faceb[4]<self.threshold: continue
            fh, fw = (faceb[3]-faceb[1]), (faceb[2]-faceb[0])

            facial5points = np.reshape(facial5points, (2, 5))

            of, tfm_inv = warp_and_crop_face(img, facial5points, reference_pts=self.reference_5pts, crop_size=(self.in_size, self.in_size))

            # enhance the face
            ef = self.facegan.process(of)

            orig_faces.append(of)
            enhanced_faces.append(ef)

            #tmp_mask = self.mask
            tmp_mask = self.mask_postprocess(self.faceparser.process(ef)[0]/255.)
            tmp_mask = cv2.resize(tmp_mask, (self.in_size, self.in_size))
            tmp_mask = cv2.warpAffine(tmp_mask, tfm_inv, (width, height), flags=3)

            if min(fh, fw)<100: # gaussian filter for small faces
                ef = cv2.filter2D(ef, -1, self.kernel)

            if self.in_size!=self.out_size:
                ef = cv2.resize(ef, (self.in_size, self.in_size))
            tmp_img = cv2.warpAffine(ef, tfm_inv, (width, height), flags=3)

            mask = tmp_mask - full_mask
            full_mask[np.where(mask>0)] = tmp_mask[np.where(mask>0)]
            full_img[np.where(mask>0)] = tmp_img[np.where(mask>0)]

        full_mask = full_mask[:, :, np.newaxis]
        if self.use_sr and img_sr is not None:
            img = cv2.convertScaleAbs(img_sr*(1-full_mask) + full_img*full_mask)
        else:
            img = cv2.convertScaleAbs(img*(1-full_mask) + full_img*full_mask)

        return img, orig_faces, enhanced_faces


if __name__=='__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('--model', type=str, default='GPEN-BFR-512', help='GPEN model')
    parser.add_argument('--key', type=str, default=None, help='key of GPEN model')
    parser.add_argument('--in_size', type=int, default=512, help='in resolution of GPEN')
    parser.add_argument('--out_size', type=int, default=512, help='out resolution of GPEN')
    parser.add_argument('--channel_multiplier', type=int, default=2, help='channel multiplier of GPEN')
    parser.add_argument('--narrow', type=float, default=1, help='channel narrow scale')
    parser.add_argument('--use_sr', action='store_true', help='use sr or not')
    parser.add_argument('--use_cuda', action='store_true', help='use cuda or not')
    parser.add_argument('--sr_model', type=str, default='rrdb_realesrnet_psnr', help='SR model')
    parser.add_argument('--sr_scale', type=int, default=2, help='SR scale')
    parser.add_argument('--indir', type=str, default='examples/imgs', help='input folder')
    parser.add_argument('--outdir', type=str, default='results/outs-BFR', help='output folder')
    args = parser.parse_args()

    #model = {'name':'GPEN-BFR-512', 'size':512, 'channel_multiplier':2, 'narrow':1}
    #model = {'name':'GPEN-BFR-256', 'size':256, 'channel_multiplier':1, 'narrow':0.5}

    os.makedirs(args.outdir, exist_ok=True)

    faceenhancer = FaceEnhancement(in_size=args.in_size, out_size=args.out_size, model=args.model, use_sr=args.use_sr, sr_model=args.sr_model, channel_multiplier=args.channel_multiplier, narrow=args.narrow, key=args.key, device='cuda' if args.use_cuda else 'cpu')

    files = sorted(glob.glob(os.path.join(args.indir, '*.*g')))
    for n, file in enumerate(files[:]):
        filename = os.path.basename(file)

        im = cv2.imread(file, cv2.IMREAD_COLOR) # BGR
        if not isinstance(im, np.ndarray): print(filename, 'error'); continue
        #im = cv2.resize(im, (0,0), fx=2, fy=2) # optional

        img, orig_faces, enhanced_faces = faceenhancer.process(im)

        im = cv2.resize(im, img.shape[:2][::-1])
        cv2.imwrite(os.path.join(args.outdir, '.'.join(filename.split('.')[:-1])+'_COMP.jpg'), np.hstack((im, img)))
        cv2.imwrite(os.path.join(args.outdir, '.'.join(filename.split('.')[:-1])+'_GPEN.jpg'), img)

        for m, (ef, of) in enumerate(zip(enhanced_faces, orig_faces)):
            of = cv2.resize(of, ef.shape[:2])
            cv2.imwrite(os.path.join(args.outdir, '.'.join(filename.split('.')[:-1])+'_face%02d'%m+'.jpg'), np.hstack((of, ef)))

        if n%10==0: print(n, filename)

    print('finished!')
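A minimal usage sketch for FaceEnhancement (not part of the commit), mirroring the defaults of the __main__ block above; it assumes the GPEN, RetinaFace and face-parsing weights have been downloaded under ./weights and that 'examples/imgs/test.jpg' is a hypothetical input image:

import cv2

enhancer = FaceEnhancement(base_dir='./', in_size=512, out_size=512,
                           model='GPEN-BFR-512', use_sr=False,
                           sr_model='rrdb_realesrnet_psnr',
                           channel_multiplier=2, narrow=1, key=None, device='cpu')
im = cv2.imread('examples/imgs/test.jpg', cv2.IMREAD_COLOR)   # BGR input frame
restored, orig_faces, enhanced_faces = enhancer.process(im)   # detect, enhance, paste back
cv2.imwrite('restored.jpg', restored)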
third_party/GPEN/face_inpainting.py
ADDED
@@ -0,0 +1,101 @@
'''
@paper: GAN Prior Embedded Network for Blind Face Restoration in the Wild (CVPR2021)
@author: yangxy (yangtao9009@gmail.com)
'''
import os
import cv2
import glob
import time
import math
import numpy as np
from PIL import Image, ImageDraw
import __init_paths
from face_model.face_gan import FaceGAN

# modified by yangxy
def brush_stroke_mask(img, color=(255,255,255)):
    min_num_vertex = 8
    max_num_vertex = 28
    mean_angle = 2*math.pi / 5
    angle_range = 2*math.pi / 15
    min_width = 12
    max_width = 80
    def generate_mask(H, W, img=None):
        average_radius = math.sqrt(H*H+W*W) / 8
        mask = Image.new('RGB', (W, H), 0)
        if img is not None: mask = img #Image.fromarray(img)

        for _ in range(np.random.randint(1, 4)):
            num_vertex = np.random.randint(min_num_vertex, max_num_vertex)
            angle_min = mean_angle - np.random.uniform(0, angle_range)
            angle_max = mean_angle + np.random.uniform(0, angle_range)
            angles = []
            vertex = []
            for i in range(num_vertex):
                if i % 2 == 0:
                    angles.append(2*math.pi - np.random.uniform(angle_min, angle_max))
                else:
                    angles.append(np.random.uniform(angle_min, angle_max))

            h, w = mask.size
            vertex.append((int(np.random.randint(0, w)), int(np.random.randint(0, h))))
            for i in range(num_vertex):
                r = np.clip(
                    np.random.normal(loc=average_radius, scale=average_radius//2),
                    0, 2*average_radius)
                new_x = np.clip(vertex[-1][0] + r * math.cos(angles[i]), 0, w)
                new_y = np.clip(vertex[-1][1] + r * math.sin(angles[i]), 0, h)
                vertex.append((int(new_x), int(new_y)))

            draw = ImageDraw.Draw(mask)
            width = int(np.random.uniform(min_width, max_width))
            draw.line(vertex, fill=color, width=width)
            for v in vertex:
                draw.ellipse((v[0] - width//2,
                              v[1] - width//2,
                              v[0] + width//2,
                              v[1] + width//2),
                             fill=color)

        return mask

    width, height = img.size
    mask = generate_mask(height, width, img)
    return mask

class FaceInpainting(object):
    def __init__(self, base_dir='./', size=1024, model=None, channel_multiplier=2):
        self.facegan = FaceGAN(base_dir, size, model, channel_multiplier)

    # make sure the face image is well aligned. Please refer to face_enhancement.py
    def process(self, brokenf):
        # complete the face
        out = self.facegan.process(brokenf)

        return out

if __name__=='__main__':
    model = {'name':'GPEN-Inpainting-1024', 'size':1024}

    indir = 'examples/ffhq-10'
    outdir = 'examples/outs-inpainting'
    os.makedirs(outdir, exist_ok=True)

    faceinpainter = FaceInpainting(size=model['size'], model=model['name'], channel_multiplier=2)

    files = sorted(glob.glob(os.path.join(indir, '*.*g')))
    for n, file in enumerate(files[:]):
        filename = os.path.basename(file)

        originf = cv2.imread(file, cv2.IMREAD_COLOR)

        brokenf = np.asarray(brush_stroke_mask(Image.fromarray(originf)))

        completef = faceinpainter.process(brokenf)

        originf = cv2.resize(originf, completef.shape[:2])
        brokenf = cv2.resize(brokenf, completef.shape[:2])
        cv2.imwrite(os.path.join(outdir, '.'.join(filename.split('.')[:-1])+'.jpg'), np.hstack((brokenf, completef, originf)))

        if n%10==0: print(n, file)
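brush_stroke_mask can be exercised on its own, as in this small sketch (not part of the commit); it draws random white strokes onto the PIL image it receives and returns that same image, which is what the inpainting demo feeds to the generator:

import numpy as np
from PIL import Image

clean = Image.fromarray(np.zeros((256, 256, 3), dtype=np.uint8))  # hypothetical blank face crop
masked = brush_stroke_mask(clean)                                  # strokes are drawn onto `clean`
masked.save('masked_example.png')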
third_party/GPEN/face_model/face_gan.py
ADDED
@@ -0,0 +1,89 @@
"""
@paper: GAN Prior Embedded Network for Blind Face Restoration in the Wild (CVPR2021)
@author: yangxy (yangtao9009@gmail.com)
"""
import torch
import os
import cv2
import glob
import numpy as np
from torch import nn
import torch.nn.functional as F
from torchvision import transforms, utils
from gpen_model import FullGenerator, FullGenerator_SR


class FaceGAN(object):
    def __init__(
        self,
        base_dir="./",
        in_size=512,
        out_size=512,
        model=None,
        channel_multiplier=2,
        narrow=1,
        key=None,
        is_norm=True,
        device="cuda",
    ):
        self.mfile = os.path.join(base_dir, "weights", model + ".pth")
        self.n_mlp = 8
        self.device = device
        self.is_norm = is_norm
        self.in_resolution = in_size
        self.out_resolution = out_size
        self.key = key
        self.load_model(channel_multiplier, narrow)

    def load_model(self, channel_multiplier=2, narrow=1):
        if self.in_resolution == self.out_resolution:
            self.model = FullGenerator(
                self.in_resolution,
                512,
                self.n_mlp,
                channel_multiplier,
                narrow=narrow,
                device=self.device,
            )
        else:
            self.model = FullGenerator_SR(
                self.in_resolution,
                self.out_resolution,
                512,
                self.n_mlp,
                channel_multiplier,
                narrow=narrow,
                device=self.device,
            )
        pretrained_dict = torch.load(self.mfile, map_location=torch.device("cpu"))
        if self.key is not None:
            pretrained_dict = pretrained_dict[self.key]
        self.model.load_state_dict(pretrained_dict)
        self.model.to(self.device)
        self.model.eval()

    def process(self, img):
        img = cv2.resize(img, (self.in_resolution, self.in_resolution))
        img_t = self.img2tensor(img)

        with torch.no_grad():
            out, __ = self.model(img_t)

        out = self.tensor2img(out)

        return out

    def img2tensor(self, img):
        img_t = torch.from_numpy(img).to(self.device) / 255.0
        if self.is_norm:
            img_t = (img_t - 0.5) / 0.5
        img_t = img_t.permute(2, 0, 1).unsqueeze(0).flip(1)  # BGR->RGB
        return img_t

    def tensor2img(self, img_t, pmax=255.0, imtype=np.uint8):
        if self.is_norm:
            img_t = img_t * 0.5 + 0.5
        img_t = img_t.squeeze(0).permute(1, 2, 0).flip(2)  # RGB->BGR
        img_np = np.clip(img_t.float().cpu().numpy(), 0, 1) * pmax

        return img_np.astype(imtype)
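A minimal usage sketch for FaceGAN (not part of the commit), assuming the checkpoint ./weights/GPEN-BFR-512.pth has been downloaded and 'face.jpg' is a hypothetical aligned face crop:

import cv2

facegan = FaceGAN(base_dir='./', in_size=512, out_size=512,
                  model='GPEN-BFR-512', channel_multiplier=2, narrow=1, device='cpu')
face = cv2.imread('face.jpg', cv2.IMREAD_COLOR)   # BGR; process() resizes to in_size anyway
restored = facegan.process(face)                  # uint8 BGR array at out_size x out_size
cv2.imwrite('face_restored.jpg', restored)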
third_party/GPEN/face_model/gpen_model.py
ADDED
@@ -0,0 +1,941 @@
| 1 |
+
"""
|
| 2 |
+
@paper: GAN Prior Embedded Network for Blind Face Restoration in the Wild (CVPR2021)
|
| 3 |
+
@author: yangxy (yangtao9009@gmail.com)
|
| 4 |
+
"""
|
| 5 |
+
import math
|
| 6 |
+
import random
|
| 7 |
+
import functools
|
| 8 |
+
import operator
|
| 9 |
+
import itertools
|
| 10 |
+
|
| 11 |
+
import torch
|
| 12 |
+
from torch import nn
|
| 13 |
+
from torch.nn import functional as F
|
| 14 |
+
from torch.autograd import Function
|
| 15 |
+
|
| 16 |
+
from op import FusedLeakyReLU, fused_leaky_relu, upfirdn2d
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
class PixelNorm(nn.Module):
|
| 20 |
+
def __init__(self):
|
| 21 |
+
super().__init__()
|
| 22 |
+
|
| 23 |
+
def forward(self, input):
|
| 24 |
+
return input * torch.rsqrt(torch.mean(input ** 2, dim=1, keepdim=True) + 1e-8)
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
def make_kernel(k):
|
| 28 |
+
k = torch.tensor(k, dtype=torch.float32)
|
| 29 |
+
|
| 30 |
+
if k.ndim == 1:
|
| 31 |
+
k = k[None, :] * k[:, None]
|
| 32 |
+
|
| 33 |
+
k /= k.sum()
|
| 34 |
+
|
| 35 |
+
return k
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
class Upsample(nn.Module):
|
| 39 |
+
def __init__(self, kernel, factor=2, device="cpu"):
|
| 40 |
+
super().__init__()
|
| 41 |
+
|
| 42 |
+
self.factor = factor
|
| 43 |
+
kernel = make_kernel(kernel) * (factor ** 2)
|
| 44 |
+
self.register_buffer("kernel", kernel)
|
| 45 |
+
|
| 46 |
+
p = kernel.shape[0] - factor
|
| 47 |
+
|
| 48 |
+
pad0 = (p + 1) // 2 + factor - 1
|
| 49 |
+
pad1 = p // 2
|
| 50 |
+
|
| 51 |
+
self.pad = (pad0, pad1)
|
| 52 |
+
self.device = device
|
| 53 |
+
|
| 54 |
+
def forward(self, input):
|
| 55 |
+
out = upfirdn2d(
|
| 56 |
+
input, self.kernel, up=self.factor, down=1, pad=self.pad, device=self.device
|
| 57 |
+
)
|
| 58 |
+
|
| 59 |
+
return out
|
| 60 |
+
|
| 61 |
+
|
| 62 |
+
class Downsample(nn.Module):
|
| 63 |
+
def __init__(self, kernel, factor=2, device="cpu"):
|
| 64 |
+
super().__init__()
|
| 65 |
+
|
| 66 |
+
self.factor = factor
|
| 67 |
+
kernel = make_kernel(kernel)
|
| 68 |
+
self.register_buffer("kernel", kernel)
|
| 69 |
+
|
| 70 |
+
p = kernel.shape[0] - factor
|
| 71 |
+
|
| 72 |
+
pad0 = (p + 1) // 2
|
| 73 |
+
pad1 = p // 2
|
| 74 |
+
|
| 75 |
+
self.pad = (pad0, pad1)
|
| 76 |
+
self.device = device
|
| 77 |
+
|
| 78 |
+
def forward(self, input):
|
| 79 |
+
out = upfirdn2d(
|
| 80 |
+
input, self.kernel, up=1, down=self.factor, pad=self.pad, device=self.device
|
| 81 |
+
)
|
| 82 |
+
|
| 83 |
+
return out
|
| 84 |
+
|
| 85 |
+
|
| 86 |
+
class Blur(nn.Module):
|
| 87 |
+
def __init__(self, kernel, pad, upsample_factor=1, device="cpu"):
|
| 88 |
+
super().__init__()
|
| 89 |
+
|
| 90 |
+
kernel = make_kernel(kernel)
|
| 91 |
+
|
| 92 |
+
if upsample_factor > 1:
|
| 93 |
+
kernel = kernel * (upsample_factor ** 2)
|
| 94 |
+
|
| 95 |
+
self.register_buffer("kernel", kernel)
|
| 96 |
+
|
| 97 |
+
self.pad = pad
|
| 98 |
+
self.device = device
|
| 99 |
+
|
| 100 |
+
def forward(self, input):
|
| 101 |
+
out = upfirdn2d(input, self.kernel, pad=self.pad, device=self.device)
|
| 102 |
+
|
| 103 |
+
return out
|
| 104 |
+
|
| 105 |
+
|
| 106 |
+
class EqualConv2d(nn.Module):
|
| 107 |
+
def __init__(
|
| 108 |
+
self, in_channel, out_channel, kernel_size, stride=1, padding=0, bias=True
|
| 109 |
+
):
|
| 110 |
+
super().__init__()
|
| 111 |
+
|
| 112 |
+
self.weight = nn.Parameter(
|
| 113 |
+
torch.randn(out_channel, in_channel, kernel_size, kernel_size)
|
| 114 |
+
)
|
| 115 |
+
self.scale = 1 / math.sqrt(in_channel * kernel_size ** 2)
|
| 116 |
+
|
| 117 |
+
self.stride = stride
|
| 118 |
+
self.padding = padding
|
| 119 |
+
|
| 120 |
+
if bias:
|
| 121 |
+
self.bias = nn.Parameter(torch.zeros(out_channel))
|
| 122 |
+
|
| 123 |
+
else:
|
| 124 |
+
self.bias = None
|
| 125 |
+
|
| 126 |
+
def forward(self, input):
|
| 127 |
+
out = F.conv2d(
|
| 128 |
+
input,
|
| 129 |
+
self.weight * self.scale,
|
| 130 |
+
bias=self.bias,
|
| 131 |
+
stride=self.stride,
|
| 132 |
+
padding=self.padding,
|
| 133 |
+
)
|
| 134 |
+
|
| 135 |
+
return out
|
| 136 |
+
|
| 137 |
+
def __repr__(self):
|
| 138 |
+
return (
|
| 139 |
+
f"{self.__class__.__name__}({self.weight.shape[1]}, {self.weight.shape[0]},"
|
| 140 |
+
f" {self.weight.shape[2]}, stride={self.stride}, padding={self.padding})"
|
| 141 |
+
)
|
| 142 |
+
|
| 143 |
+
|
| 144 |
+
class EqualLinear(nn.Module):
|
| 145 |
+
def __init__(
|
| 146 |
+
self,
|
| 147 |
+
in_dim,
|
| 148 |
+
out_dim,
|
| 149 |
+
bias=True,
|
| 150 |
+
bias_init=0,
|
| 151 |
+
lr_mul=1,
|
| 152 |
+
activation=None,
|
| 153 |
+
device="cpu",
|
| 154 |
+
):
|
| 155 |
+
super().__init__()
|
| 156 |
+
|
| 157 |
+
self.weight = nn.Parameter(torch.randn(out_dim, in_dim).div_(lr_mul))
|
| 158 |
+
|
| 159 |
+
if bias:
|
| 160 |
+
self.bias = nn.Parameter(torch.zeros(out_dim).fill_(bias_init))
|
| 161 |
+
|
| 162 |
+
else:
|
| 163 |
+
self.bias = None
|
| 164 |
+
|
| 165 |
+
self.activation = activation
|
| 166 |
+
self.device = device
|
| 167 |
+
|
| 168 |
+
self.scale = (1 / math.sqrt(in_dim)) * lr_mul
|
| 169 |
+
self.lr_mul = lr_mul
|
| 170 |
+
|
| 171 |
+
def forward(self, input):
|
| 172 |
+
if self.activation:
|
| 173 |
+
out = F.linear(input, self.weight * self.scale)
|
| 174 |
+
out = fused_leaky_relu(out, self.bias * self.lr_mul, device=self.device)
|
| 175 |
+
|
| 176 |
+
else:
|
| 177 |
+
out = F.linear(
|
| 178 |
+
input, self.weight * self.scale, bias=self.bias * self.lr_mul
|
| 179 |
+
)
|
| 180 |
+
|
| 181 |
+
return out
|
| 182 |
+
|
| 183 |
+
def __repr__(self):
|
| 184 |
+
return (
|
| 185 |
+
f"{self.__class__.__name__}({self.weight.shape[1]}, {self.weight.shape[0]})"
|
| 186 |
+
)
|
| 187 |
+
|
| 188 |
+
|
| 189 |
+
class ScaledLeakyReLU(nn.Module):
|
| 190 |
+
def __init__(self, negative_slope=0.2):
|
| 191 |
+
super().__init__()
|
| 192 |
+
|
| 193 |
+
self.negative_slope = negative_slope
|
| 194 |
+
|
| 195 |
+
def forward(self, input):
|
| 196 |
+
out = F.leaky_relu(input, negative_slope=self.negative_slope)
|
| 197 |
+
|
| 198 |
+
return out * math.sqrt(2)
|
| 199 |
+
|
| 200 |
+
|
| 201 |
+
class ModulatedConv2d(nn.Module):
|
| 202 |
+
def __init__(
|
| 203 |
+
self,
|
| 204 |
+
in_channel,
|
| 205 |
+
out_channel,
|
| 206 |
+
kernel_size,
|
| 207 |
+
style_dim,
|
| 208 |
+
demodulate=True,
|
| 209 |
+
upsample=False,
|
| 210 |
+
downsample=False,
|
| 211 |
+
blur_kernel=[1, 3, 3, 1],
|
| 212 |
+
device="cpu",
|
| 213 |
+
):
|
| 214 |
+
super().__init__()
|
| 215 |
+
|
| 216 |
+
self.eps = 1e-8
|
| 217 |
+
self.kernel_size = kernel_size
|
| 218 |
+
self.in_channel = in_channel
|
| 219 |
+
self.out_channel = out_channel
|
| 220 |
+
self.upsample = upsample
|
| 221 |
+
self.downsample = downsample
|
| 222 |
+
|
| 223 |
+
if upsample:
|
| 224 |
+
factor = 2
|
| 225 |
+
p = (len(blur_kernel) - factor) - (kernel_size - 1)
|
| 226 |
+
pad0 = (p + 1) // 2 + factor - 1
|
| 227 |
+
pad1 = p // 2 + 1
|
| 228 |
+
|
| 229 |
+
self.blur = Blur(
|
| 230 |
+
blur_kernel, pad=(pad0, pad1), upsample_factor=factor, device=device
|
| 231 |
+
)
|
| 232 |
+
|
| 233 |
+
if downsample:
|
| 234 |
+
factor = 2
|
| 235 |
+
p = (len(blur_kernel) - factor) + (kernel_size - 1)
|
| 236 |
+
pad0 = (p + 1) // 2
|
| 237 |
+
pad1 = p // 2
|
| 238 |
+
|
| 239 |
+
self.blur = Blur(blur_kernel, pad=(pad0, pad1), device=device)
|
| 240 |
+
|
| 241 |
+
fan_in = in_channel * kernel_size ** 2
|
| 242 |
+
self.scale = 1 / math.sqrt(fan_in)
|
| 243 |
+
self.padding = kernel_size // 2
|
| 244 |
+
|
| 245 |
+
self.weight = nn.Parameter(
|
| 246 |
+
torch.randn(1, out_channel, in_channel, kernel_size, kernel_size)
|
| 247 |
+
)
|
| 248 |
+
|
| 249 |
+
self.modulation = EqualLinear(style_dim, in_channel, bias_init=1)
|
| 250 |
+
|
| 251 |
+
self.demodulate = demodulate
|
| 252 |
+
|
| 253 |
+
def __repr__(self):
|
| 254 |
+
return (
|
| 255 |
+
f"{self.__class__.__name__}({self.in_channel}, {self.out_channel}, {self.kernel_size}, "
|
| 256 |
+
f"upsample={self.upsample}, downsample={self.downsample})"
|
| 257 |
+
)
|
| 258 |
+
|
| 259 |
+
def forward(self, input, style):
|
| 260 |
+
batch, in_channel, height, width = input.shape
|
| 261 |
+
|
| 262 |
+
style = self.modulation(style).view(batch, 1, in_channel, 1, 1)
|
| 263 |
+
weight = self.scale * self.weight * style
|
| 264 |
+
|
| 265 |
+
if self.demodulate:
|
| 266 |
+
demod = torch.rsqrt(weight.pow(2).sum([2, 3, 4]) + 1e-8)
|
| 267 |
+
weight = weight * demod.view(batch, self.out_channel, 1, 1, 1)
|
| 268 |
+
|
| 269 |
+
weight = weight.view(
|
| 270 |
+
batch * self.out_channel, in_channel, self.kernel_size, self.kernel_size
|
| 271 |
+
)
|
| 272 |
+
|
| 273 |
+
if self.upsample:
|
| 274 |
+
input = input.view(1, batch * in_channel, height, width)
|
| 275 |
+
weight = weight.view(
|
| 276 |
+
batch, self.out_channel, in_channel, self.kernel_size, self.kernel_size
|
| 277 |
+
)
|
| 278 |
+
weight = weight.transpose(1, 2).reshape(
|
| 279 |
+
batch * in_channel, self.out_channel, self.kernel_size, self.kernel_size
|
| 280 |
+
)
|
| 281 |
+
out = F.conv_transpose2d(input, weight, padding=0, stride=2, groups=batch)
|
| 282 |
+
_, _, height, width = out.shape
|
| 283 |
+
out = out.view(batch, self.out_channel, height, width)
|
| 284 |
+
out = self.blur(out)
|
| 285 |
+
|
| 286 |
+
elif self.downsample:
|
| 287 |
+
input = self.blur(input)
|
| 288 |
+
_, _, height, width = input.shape
|
| 289 |
+
input = input.view(1, batch * in_channel, height, width)
|
| 290 |
+
out = F.conv2d(input, weight, padding=0, stride=2, groups=batch)
|
| 291 |
+
_, _, height, width = out.shape
|
| 292 |
+
out = out.view(batch, self.out_channel, height, width)
|
| 293 |
+
|
| 294 |
+
else:
|
| 295 |
+
input = input.view(1, batch * in_channel, height, width)
|
| 296 |
+
out = F.conv2d(input, weight, padding=self.padding, groups=batch)
|
| 297 |
+
_, _, height, width = out.shape
|
| 298 |
+
out = out.view(batch, self.out_channel, height, width)
|
| 299 |
+
|
| 300 |
+
return out
|
| 301 |
+
|
| 302 |
+
|
| 303 |
+
class NoiseInjection(nn.Module):
|
| 304 |
+
def __init__(self, isconcat=True):
|
| 305 |
+
super().__init__()
|
| 306 |
+
|
| 307 |
+
self.isconcat = isconcat
|
| 308 |
+
self.weight = nn.Parameter(torch.zeros(1))
|
| 309 |
+
|
| 310 |
+
def forward(self, image, noise=None):
|
| 311 |
+
if noise is None:
|
| 312 |
+
batch, channel, height, width = image.shape
|
| 313 |
+
noise = image.new_empty(batch, channel, height, width).normal_()
|
| 314 |
+
|
| 315 |
+
if self.isconcat:
|
| 316 |
+
return torch.cat((image, self.weight * noise), dim=1)
|
| 317 |
+
else:
|
| 318 |
+
return image + self.weight * noise
|
| 319 |
+
|
| 320 |
+
|
| 321 |
+
class ConstantInput(nn.Module):
|
| 322 |
+
def __init__(self, channel, size=4):
|
| 323 |
+
super().__init__()
|
| 324 |
+
|
| 325 |
+
self.input = nn.Parameter(torch.randn(1, channel, size, size))
|
| 326 |
+
|
| 327 |
+
def forward(self, input):
|
| 328 |
+
batch = input.shape[0]
|
| 329 |
+
out = self.input.repeat(batch, 1, 1, 1)
|
| 330 |
+
|
| 331 |
+
return out
|
| 332 |
+
|
| 333 |
+
|
| 334 |
+
class StyledConv(nn.Module):
|
| 335 |
+
def __init__(
|
| 336 |
+
self,
|
| 337 |
+
in_channel,
|
| 338 |
+
out_channel,
|
| 339 |
+
kernel_size,
|
| 340 |
+
style_dim,
|
| 341 |
+
upsample=False,
|
| 342 |
+
blur_kernel=[1, 3, 3, 1],
|
| 343 |
+
demodulate=True,
|
| 344 |
+
isconcat=True,
|
| 345 |
+
device="cpu",
|
| 346 |
+
):
|
| 347 |
+
super().__init__()
|
| 348 |
+
|
| 349 |
+
self.conv = ModulatedConv2d(
|
| 350 |
+
in_channel,
|
| 351 |
+
out_channel,
|
| 352 |
+
kernel_size,
|
| 353 |
+
style_dim,
|
| 354 |
+
upsample=upsample,
|
| 355 |
+
blur_kernel=blur_kernel,
|
| 356 |
+
demodulate=demodulate,
|
| 357 |
+
device=device,
|
| 358 |
+
)
|
| 359 |
+
|
| 360 |
+
self.noise = NoiseInjection(isconcat)
|
| 361 |
+
# self.bias = nn.Parameter(torch.zeros(1, out_channel, 1, 1))
|
| 362 |
+
# self.activate = ScaledLeakyReLU(0.2)
|
| 363 |
+
feat_multiplier = 2 if isconcat else 1
|
| 364 |
+
self.activate = FusedLeakyReLU(out_channel * feat_multiplier, device=device)
|
| 365 |
+
|
| 366 |
+
def forward(self, input, style, noise=None):
|
| 367 |
+
out = self.conv(input, style)
|
| 368 |
+
out = self.noise(out, noise=noise)
|
| 369 |
+
# out = out + self.bias
|
| 370 |
+
out = self.activate(out)
|
| 371 |
+
|
| 372 |
+
return out
|
| 373 |
+
|
| 374 |
+
|
| 375 |
+
class ToRGB(nn.Module):
|
| 376 |
+
def __init__(
|
| 377 |
+
self,
|
| 378 |
+
in_channel,
|
| 379 |
+
style_dim,
|
| 380 |
+
upsample=True,
|
| 381 |
+
blur_kernel=[1, 3, 3, 1],
|
| 382 |
+
device="cpu",
|
| 383 |
+
):
|
| 384 |
+
super().__init__()
|
| 385 |
+
|
| 386 |
+
if upsample:
|
| 387 |
+
self.upsample = Upsample(blur_kernel, device=device)
|
| 388 |
+
|
| 389 |
+
self.conv = ModulatedConv2d(
|
| 390 |
+
in_channel, 3, 1, style_dim, demodulate=False, device=device
|
| 391 |
+
)
|
| 392 |
+
self.bias = nn.Parameter(torch.zeros(1, 3, 1, 1))
|
| 393 |
+
|
| 394 |
+
def forward(self, input, style, skip=None):
|
| 395 |
+
out = self.conv(input, style)
|
| 396 |
+
out = out + self.bias
|
| 397 |
+
|
| 398 |
+
if skip is not None:
|
| 399 |
+
skip = self.upsample(skip)
|
| 400 |
+
|
| 401 |
+
out = out + skip
|
| 402 |
+
|
| 403 |
+
return out
|
| 404 |
+
|
| 405 |
+
|
| 406 |
+
class Generator(nn.Module):
|
| 407 |
+
def __init__(
|
| 408 |
+
self,
|
| 409 |
+
size,
|
| 410 |
+
style_dim,
|
| 411 |
+
n_mlp,
|
| 412 |
+
channel_multiplier=2,
|
| 413 |
+
blur_kernel=[1, 3, 3, 1],
|
| 414 |
+
lr_mlp=0.01,
|
| 415 |
+
isconcat=True,
|
| 416 |
+
narrow=1,
|
| 417 |
+
device="cpu",
|
| 418 |
+
):
|
| 419 |
+
super().__init__()
|
| 420 |
+
|
| 421 |
+
self.size = size
|
| 422 |
+
self.n_mlp = n_mlp
|
| 423 |
+
self.style_dim = style_dim
|
| 424 |
+
self.feat_multiplier = 2 if isconcat else 1
|
| 425 |
+
|
| 426 |
+
layers = [PixelNorm()]
|
| 427 |
+
|
| 428 |
+
for i in range(n_mlp):
|
| 429 |
+
layers.append(
|
| 430 |
+
EqualLinear(
|
| 431 |
+
style_dim,
|
| 432 |
+
style_dim,
|
| 433 |
+
lr_mul=lr_mlp,
|
| 434 |
+
activation="fused_lrelu",
|
| 435 |
+
device=device,
|
| 436 |
+
)
|
| 437 |
+
)
|
| 438 |
+
|
| 439 |
+
self.style = nn.Sequential(*layers)
|
| 440 |
+
|
| 441 |
+
self.channels = {
|
| 442 |
+
4: int(512 * narrow),
|
| 443 |
+
8: int(512 * narrow),
|
| 444 |
+
16: int(512 * narrow),
|
| 445 |
+
32: int(512 * narrow),
|
| 446 |
+
64: int(256 * channel_multiplier * narrow),
|
| 447 |
+
128: int(128 * channel_multiplier * narrow),
|
| 448 |
+
256: int(64 * channel_multiplier * narrow),
|
| 449 |
+
512: int(32 * channel_multiplier * narrow),
|
| 450 |
+
1024: int(16 * channel_multiplier * narrow),
|
| 451 |
+
}
|
| 452 |
+
|
| 453 |
+
self.input = ConstantInput(self.channels[4])
|
| 454 |
+
self.conv1 = StyledConv(
|
| 455 |
+
self.channels[4],
|
| 456 |
+
self.channels[4],
|
| 457 |
+
3,
|
| 458 |
+
style_dim,
|
| 459 |
+
blur_kernel=blur_kernel,
|
| 460 |
+
isconcat=isconcat,
|
| 461 |
+
device=device,
|
| 462 |
+
)
|
| 463 |
+
self.to_rgb1 = ToRGB(
|
| 464 |
+
self.channels[4] * self.feat_multiplier,
|
| 465 |
+
style_dim,
|
| 466 |
+
upsample=False,
|
| 467 |
+
device=device,
|
| 468 |
+
)
|
| 469 |
+
|
| 470 |
+
self.log_size = int(math.log(size, 2))
|
| 471 |
+
|
| 472 |
+
self.convs = nn.ModuleList()
|
| 473 |
+
self.upsamples = nn.ModuleList()
|
| 474 |
+
self.to_rgbs = nn.ModuleList()
|
| 475 |
+
|
| 476 |
+
in_channel = self.channels[4]
|
| 477 |
+
|
| 478 |
+
for i in range(3, self.log_size + 1):
|
| 479 |
+
out_channel = self.channels[2 ** i]
|
| 480 |
+
|
| 481 |
+
self.convs.append(
|
| 482 |
+
StyledConv(
|
| 483 |
+
in_channel * self.feat_multiplier,
|
| 484 |
+
out_channel,
|
| 485 |
+
3,
|
| 486 |
+
style_dim,
|
| 487 |
+
upsample=True,
|
| 488 |
+
blur_kernel=blur_kernel,
|
| 489 |
+
isconcat=isconcat,
|
| 490 |
+
device=device,
|
| 491 |
+
)
|
| 492 |
+
)
|
| 493 |
+
|
| 494 |
+
self.convs.append(
|
| 495 |
+
StyledConv(
|
| 496 |
+
out_channel * self.feat_multiplier,
|
| 497 |
+
out_channel,
|
| 498 |
+
3,
|
| 499 |
+
style_dim,
|
| 500 |
+
blur_kernel=blur_kernel,
|
| 501 |
+
isconcat=isconcat,
|
| 502 |
+
device=device,
|
| 503 |
+
)
|
| 504 |
+
)
|
| 505 |
+
|
| 506 |
+
self.to_rgbs.append(
|
| 507 |
+
ToRGB(out_channel * self.feat_multiplier, style_dim, device=device)
|
| 508 |
+
)
|
| 509 |
+
|
| 510 |
+
in_channel = out_channel
|
| 511 |
+
|
| 512 |
+
self.n_latent = self.log_size * 2 - 2
|
| 513 |
+
|
| 514 |
+
def make_noise(self):
|
| 515 |
+
device = self.input.input.device
|
| 516 |
+
|
| 517 |
+
noises = [torch.randn(1, 1, 2 ** 2, 2 ** 2, device=device)]
|
| 518 |
+
|
| 519 |
+
for i in range(3, self.log_size + 1):
|
| 520 |
+
for _ in range(2):
|
| 521 |
+
noises.append(torch.randn(1, 1, 2 ** i, 2 ** i, device=device))
|
| 522 |
+
|
| 523 |
+
return noises
|
| 524 |
+
|
| 525 |
+
def mean_latent(self, n_latent):
|
| 526 |
+
latent_in = torch.randn(
|
| 527 |
+
n_latent, self.style_dim, device=self.input.input.device
|
| 528 |
+
)
|
| 529 |
+
latent = self.style(latent_in).mean(0, keepdim=True)
|
| 530 |
+
|
| 531 |
+
return latent
|
| 532 |
+
|
| 533 |
+
def get_latent(self, input):
|
| 534 |
+
return self.style(input)
|
| 535 |
+
|
| 536 |
+
def forward(
|
| 537 |
+
self,
|
| 538 |
+
styles,
|
| 539 |
+
return_latents=False,
|
| 540 |
+
inject_index=None,
|
| 541 |
+
truncation=1,
|
| 542 |
+
truncation_latent=None,
|
| 543 |
+
input_is_latent=False,
|
| 544 |
+
noise=None,
|
| 545 |
+
):
|
| 546 |
+
if not input_is_latent:
|
| 547 |
+
styles = [self.style(s) for s in styles]
|
| 548 |
+
|
| 549 |
+
if noise is None:
|
| 550 |
+
"""
|
| 551 |
+
noise = [None] * (2 * (self.log_size - 2) + 1)
|
| 552 |
+
"""
|
| 553 |
+
noise = []
|
| 554 |
+
batch = styles[0].shape[0]
|
| 555 |
+
for i in range(self.n_mlp + 1):
|
| 556 |
+
size = 2 ** (i + 2)
|
| 557 |
+
noise.append(
|
| 558 |
+
torch.randn(
|
| 559 |
+
batch, self.channels[size], size, size, device=styles[0].device
|
| 560 |
+
)
|
| 561 |
+
)
|
| 562 |
+
|
| 563 |
+
if truncation < 1:
|
| 564 |
+
style_t = []
|
| 565 |
+
|
| 566 |
+
for style in styles:
|
| 567 |
+
style_t.append(
|
| 568 |
+
truncation_latent + truncation * (style - truncation_latent)
|
| 569 |
+
)
|
| 570 |
+
|
| 571 |
+
styles = style_t
|
| 572 |
+
|
| 573 |
+
if len(styles) < 2:
|
| 574 |
+
inject_index = self.n_latent
|
| 575 |
+
|
| 576 |
+
latent = styles[0].unsqueeze(1).repeat(1, inject_index, 1)
|
| 577 |
+
|
| 578 |
+
else:
|
| 579 |
+
if inject_index is None:
|
| 580 |
+
inject_index = random.randint(1, self.n_latent - 1)
|
| 581 |
+
|
| 582 |
+
latent = styles[0].unsqueeze(1).repeat(1, inject_index, 1)
|
| 583 |
+
latent2 = styles[1].unsqueeze(1).repeat(1, self.n_latent - inject_index, 1)
|
| 584 |
+
|
| 585 |
+
latent = torch.cat([latent, latent2], 1)
|
| 586 |
+
|
| 587 |
+
out = self.input(latent)
|
| 588 |
+
out = self.conv1(out, latent[:, 0], noise=noise[0])
|
| 589 |
+
|
| 590 |
+
skip = self.to_rgb1(out, latent[:, 1])
|
| 591 |
+
|
| 592 |
+
i = 1
|
| 593 |
+
for conv1, conv2, noise1, noise2, to_rgb in zip(
|
| 594 |
+
self.convs[::2], self.convs[1::2], noise[1::2], noise[2::2], self.to_rgbs
|
| 595 |
+
):
|
| 596 |
+
out = conv1(out, latent[:, i], noise=noise1)
|
| 597 |
+
out = conv2(out, latent[:, i + 1], noise=noise2)
|
| 598 |
+
skip = to_rgb(out, latent[:, i + 2], skip)
|
| 599 |
+
|
| 600 |
+
i += 2
|
| 601 |
+
|
| 602 |
+
image = skip
|
| 603 |
+
|
| 604 |
+
if return_latents:
|
| 605 |
+
return image, latent
|
| 606 |
+
|
| 607 |
+
else:
|
| 608 |
+
# return image, None
|
| 609 |
+
return image
|
| 610 |
+
|
| 611 |
+
|
| 612 |
+
class ConvLayer(nn.Sequential):
|
| 613 |
+
def __init__(
|
| 614 |
+
self,
|
| 615 |
+
in_channel,
|
| 616 |
+
out_channel,
|
| 617 |
+
kernel_size,
|
| 618 |
+
downsample=False,
|
| 619 |
+
blur_kernel=[1, 3, 3, 1],
|
| 620 |
+
bias=True,
|
| 621 |
+
activate=True,
|
| 622 |
+
device="cpu",
|
| 623 |
+
):
|
| 624 |
+
layers = []
|
| 625 |
+
|
| 626 |
+
if downsample:
|
| 627 |
+
factor = 2
|
| 628 |
+
p = (len(blur_kernel) - factor) + (kernel_size - 1)
|
| 629 |
+
pad0 = (p + 1) // 2
|
| 630 |
+
pad1 = p // 2
|
| 631 |
+
|
| 632 |
+
layers.append(Blur(blur_kernel, pad=(pad0, pad1), device=device))
|
| 633 |
+
|
| 634 |
+
stride = 2
|
| 635 |
+
self.padding = 0
|
| 636 |
+
|
| 637 |
+
else:
|
| 638 |
+
stride = 1
|
| 639 |
+
self.padding = kernel_size // 2
|
| 640 |
+
|
| 641 |
+
layers.append(
|
| 642 |
+
EqualConv2d(
|
| 643 |
+
in_channel,
|
| 644 |
+
out_channel,
|
| 645 |
+
kernel_size,
|
| 646 |
+
padding=self.padding,
|
| 647 |
+
stride=stride,
|
| 648 |
+
bias=bias and not activate,
|
| 649 |
+
)
|
| 650 |
+
)
|
| 651 |
+
|
| 652 |
+
if activate:
|
| 653 |
+
if bias:
|
| 654 |
+
layers.append(FusedLeakyReLU(out_channel, device=device))
|
| 655 |
+
|
| 656 |
+
else:
|
| 657 |
+
layers.append(ScaledLeakyReLU(0.2))
|
| 658 |
+
|
| 659 |
+
super().__init__(*layers)
|
| 660 |
+
|
| 661 |
+
|
| 662 |
+
class ResBlock(nn.Module):
|
| 663 |
+
def __init__(self, in_channel, out_channel, blur_kernel=[1, 3, 3, 1], device="cpu"):
|
| 664 |
+
super().__init__()
|
| 665 |
+
|
| 666 |
+
self.conv1 = ConvLayer(in_channel, in_channel, 3, device=device)
|
| 667 |
+
self.conv2 = ConvLayer(
|
| 668 |
+
in_channel, out_channel, 3, downsample=True, device=device
|
| 669 |
+
)
|
| 670 |
+
|
| 671 |
+
self.skip = ConvLayer(
|
| 672 |
+
in_channel, out_channel, 1, downsample=True, activate=False, bias=False
|
| 673 |
+
)
|
| 674 |
+
|
| 675 |
+
def forward(self, input):
|
| 676 |
+
out = self.conv1(input)
|
| 677 |
+
out = self.conv2(out)
|
| 678 |
+
|
| 679 |
+
skip = self.skip(input)
|
| 680 |
+
out = (out + skip) / math.sqrt(2)
|
| 681 |
+
|
| 682 |
+
return out
|
| 683 |
+
|
| 684 |
+
|
| 685 |
+
class FullGenerator(nn.Module):
|
| 686 |
+
def __init__(
|
| 687 |
+
self,
|
| 688 |
+
size,
|
| 689 |
+
style_dim,
|
| 690 |
+
n_mlp,
|
| 691 |
+
channel_multiplier=2,
|
| 692 |
+
blur_kernel=[1, 3, 3, 1],
|
| 693 |
+
lr_mlp=0.01,
|
| 694 |
+
isconcat=True,
|
| 695 |
+
narrow=1,
|
| 696 |
+
device="cpu",
|
| 697 |
+
):
|
| 698 |
+
super().__init__()
|
| 699 |
+
channels = {
|
| 700 |
+
4: int(512 * narrow),
|
| 701 |
+
8: int(512 * narrow),
|
| 702 |
+
16: int(512 * narrow),
|
| 703 |
+
32: int(512 * narrow),
|
| 704 |
+
64: int(256 * channel_multiplier * narrow),
|
| 705 |
+
128: int(128 * channel_multiplier * narrow),
|
| 706 |
+
256: int(64 * channel_multiplier * narrow),
|
| 707 |
+
512: int(32 * channel_multiplier * narrow),
|
| 708 |
+
1024: int(16 * channel_multiplier * narrow),
|
| 709 |
+
}
|
| 710 |
+
|
| 711 |
+
self.log_size = int(math.log(size, 2))
|
| 712 |
+
self.generator = Generator(
|
| 713 |
+
size,
|
| 714 |
+
style_dim,
|
| 715 |
+
n_mlp,
|
| 716 |
+
channel_multiplier=channel_multiplier,
|
| 717 |
+
blur_kernel=blur_kernel,
|
| 718 |
+
lr_mlp=lr_mlp,
|
| 719 |
+
isconcat=isconcat,
|
| 720 |
+
narrow=narrow,
|
| 721 |
+
device=device,
|
| 722 |
+
)
|
| 723 |
+
|
| 724 |
+
conv = [ConvLayer(3, channels[size], 1, device=device)]
|
| 725 |
+
self.ecd0 = nn.Sequential(*conv)
|
| 726 |
+
in_channel = channels[size]
|
| 727 |
+
|
| 728 |
        self.names = ["ecd%d" % i for i in range(self.log_size - 1)]
        for i in range(self.log_size, 2, -1):
            out_channel = channels[2 ** (i - 1)]
            # conv = [ResBlock(in_channel, out_channel, blur_kernel)]
            conv = [
                ConvLayer(in_channel, out_channel, 3, downsample=True, device=device)
            ]
            setattr(self, self.names[self.log_size - i + 1], nn.Sequential(*conv))
            in_channel = out_channel
        self.final_linear = nn.Sequential(
            EqualLinear(
                channels[4] * 4 * 4, style_dim, activation="fused_lrelu", device=device
            )
        )

    def forward(
        self,
        inputs,
        return_latents=True,
        inject_index=None,
        truncation=1,
        truncation_latent=None,
        input_is_latent=False,
    ):
        noise = []
        for i in range(self.log_size - 1):
            ecd = getattr(self, self.names[i])
            inputs = ecd(inputs)
            noise.append(inputs)
            # print(inputs.shape)
        inputs = inputs.view(inputs.shape[0], -1)
        outs = self.final_linear(inputs)
        # print(outs.shape)
        noise = list(
            itertools.chain.from_iterable(itertools.repeat(x, 2) for x in noise)
        )[::-1]
        outs = self.generator(
            [outs],
            return_latents,
            inject_index,
            truncation,
            truncation_latent,
            input_is_latent,
            noise=noise[1:],
        )
        return outs


class Discriminator(nn.Module):
    def __init__(
        self,
        size,
        channel_multiplier=2,
        blur_kernel=[1, 3, 3, 1],
        narrow=1,
        device="cpu",
    ):
        super().__init__()

        channels = {
            4: int(512 * narrow),
            8: int(512 * narrow),
            16: int(512 * narrow),
            32: int(512 * narrow),
            64: int(256 * channel_multiplier * narrow),
            128: int(128 * channel_multiplier * narrow),
            256: int(64 * channel_multiplier * narrow),
            512: int(32 * channel_multiplier * narrow),
            1024: int(16 * channel_multiplier * narrow),
        }

        convs = [ConvLayer(3, channels[size], 1, device=device)]

        log_size = int(math.log(size, 2))

        in_channel = channels[size]

        for i in range(log_size, 2, -1):
            out_channel = channels[2 ** (i - 1)]

            convs.append(ResBlock(in_channel, out_channel, blur_kernel, device=device))

            in_channel = out_channel

        self.convs = nn.Sequential(*convs)

        self.stddev_group = 4
        self.stddev_feat = 1

        self.final_conv = ConvLayer(in_channel + 1, channels[4], 3, device=device)
        self.final_linear = nn.Sequential(
            EqualLinear(
                channels[4] * 4 * 4,
                channels[4],
                activation="fused_lrelu",
                device=device,
            ),
            EqualLinear(channels[4], 1),
        )

    def forward(self, input):
        out = self.convs(input)

        batch, channel, height, width = out.shape
        group = min(batch, self.stddev_group)
        stddev = out.view(
            group, -1, self.stddev_feat, channel // self.stddev_feat, height, width
        )
        stddev = torch.sqrt(stddev.var(0, unbiased=False) + 1e-8)
        stddev = stddev.mean([2, 3, 4], keepdims=True).squeeze(2)
        stddev = stddev.repeat(group, 1, height, width)
        out = torch.cat([out, stddev], 1)

        out = self.final_conv(out)

        out = out.view(batch, -1)
        out = self.final_linear(out)
        return out


class FullGenerator_SR(nn.Module):
    def __init__(
        self,
        in_size,
        out_size,
        style_dim,
        n_mlp,
        channel_multiplier=2,
        blur_kernel=[1, 3, 3, 1],
        lr_mlp=0.01,
        isconcat=True,
        narrow=1,
        device="cpu",
    ):
        super().__init__()
        channels = {
            4: int(512 * narrow),
            8: int(512 * narrow),
            16: int(512 * narrow),
            32: int(512 * narrow),
            64: int(256 * channel_multiplier * narrow),
            128: int(128 * channel_multiplier * narrow),
            256: int(64 * channel_multiplier * narrow),
            512: int(32 * channel_multiplier * narrow),
            1024: int(16 * channel_multiplier * narrow),
            2048: int(8 * channel_multiplier * narrow),
        }

        self.log_insize = int(math.log(in_size, 2))
        self.log_outsize = int(math.log(out_size, 2))
        self.generator = Generator(
            out_size,
            style_dim,
            n_mlp,
            channel_multiplier=channel_multiplier,
            blur_kernel=blur_kernel,
            lr_mlp=lr_mlp,
            isconcat=isconcat,
            narrow=narrow,
            device=device,
        )

        conv = [ConvLayer(3, channels[in_size], 1, device=device)]
        self.ecd0 = nn.Sequential(*conv)
        in_channel = channels[in_size]

        self.names = ["ecd%d" % i for i in range(self.log_insize - 1)]
        for i in range(self.log_insize, 2, -1):
            out_channel = channels[2 ** (i - 1)]
            # conv = [ResBlock(in_channel, out_channel, blur_kernel)]
            conv = [
                ConvLayer(in_channel, out_channel, 3, downsample=True, device=device)
            ]
            setattr(self, self.names[self.log_insize - i + 1], nn.Sequential(*conv))
            in_channel = out_channel
        self.final_linear = nn.Sequential(
            EqualLinear(
                channels[4] * 4 * 4, style_dim, activation="fused_lrelu", device=device
            )
        )

    def forward(
        self,
        inputs,
        return_latents=False,
        inject_index=None,
        truncation=1,
        truncation_latent=None,
        input_is_latent=False,
    ):
        noise = []
        for i in range(self.log_outsize - self.log_insize):
            noise.append(None)
        for i in range(self.log_insize - 1):
            ecd = getattr(self, self.names[i])
            inputs = ecd(inputs)
            noise.append(inputs)
            # print(inputs.shape)
        inputs = inputs.view(inputs.shape[0], -1)
        outs = self.final_linear(inputs)
        # print(outs.shape)
        noise = list(
            itertools.chain.from_iterable(itertools.repeat(x, 2) for x in noise)
        )[::-1]
        image, latent = self.generator(
            [outs],
            return_latents,
            inject_index,
            truncation,
            truncation_latent,
            input_is_latent,
            noise=noise[1:],
        )
        return image, latent
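Note: FullGenerator_SR pairs a CNN encoder with the StyleGAN decoder; each encoder feature is repeated twice, the list is reversed, and the entries are fed to the decoder as per-resolution "noise" (with None placeholders for the resolutions above the input size). The following is a minimal shape sketch, not part of the committed files; the sizes are illustrative and it assumes third_party/GPEN is on sys.path and that CPU execution (device='cpu') is acceptable.

# Illustrative forward pass of FullGenerator_SR on CPU (random weights).
import torch
from face_model.gpen_model import FullGenerator_SR

net = FullGenerator_SR(in_size=256, out_size=512, style_dim=512, n_mlp=8, device='cpu')
lq = torch.randn(1, 3, 256, 256)   # low-quality input face
sr, _ = net(lq)                    # encoder -> style vector -> StyleGAN decoder with skip "noise"
print(sr.shape)                    # expected: torch.Size([1, 3, 512, 512])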
third_party/GPEN/face_model/op/__init__.py
ADDED
|
@@ -0,0 +1,2 @@
from .fused_act import FusedLeakyReLU, fused_leaky_relu
from .upfirdn2d import upfirdn2d
third_party/GPEN/face_model/op/fused_act.py
ADDED
|
@@ -0,0 +1,96 @@
import os
import platform

import torch
from torch import nn
import torch.nn.functional as F
from torch.autograd import Function
from torch.utils.cpp_extension import load, _import_module_from_library

# if running GPEN without cuda, please comment line 11-19
if platform.system() == 'Linux' and torch.cuda.is_available():
    module_path = os.path.dirname(__file__)
    fused = load(
        'fused',
        sources=[
            os.path.join(module_path, 'fused_bias_act.cpp'),
            os.path.join(module_path, 'fused_bias_act_kernel.cu'),
        ],
    )


#fused = _import_module_from_library('fused', '/tmp/torch_extensions/fused', True)


class FusedLeakyReLUFunctionBackward(Function):
    @staticmethod
    def forward(ctx, grad_output, out, negative_slope, scale):
        ctx.save_for_backward(out)
        ctx.negative_slope = negative_slope
        ctx.scale = scale

        empty = grad_output.new_empty(0)

        grad_input = fused.fused_bias_act(
            grad_output, empty, out, 3, 1, negative_slope, scale
        )

        dim = [0]

        if grad_input.ndim > 2:
            dim += list(range(2, grad_input.ndim))

        grad_bias = grad_input.sum(dim).detach()

        return grad_input, grad_bias

    @staticmethod
    def backward(ctx, gradgrad_input, gradgrad_bias):
        out, = ctx.saved_tensors
        gradgrad_out = fused.fused_bias_act(
            gradgrad_input, gradgrad_bias, out, 3, 1, ctx.negative_slope, ctx.scale
        )

        return gradgrad_out, None, None, None


class FusedLeakyReLUFunction(Function):
    @staticmethod
    def forward(ctx, input, bias, negative_slope, scale):
        empty = input.new_empty(0)
        out = fused.fused_bias_act(input, bias, empty, 3, 0, negative_slope, scale)
        ctx.save_for_backward(out)
        ctx.negative_slope = negative_slope
        ctx.scale = scale

        return out

    @staticmethod
    def backward(ctx, grad_output):
        out, = ctx.saved_tensors

        grad_input, grad_bias = FusedLeakyReLUFunctionBackward.apply(
            grad_output, out, ctx.negative_slope, ctx.scale
        )

        return grad_input, grad_bias, None, None


class FusedLeakyReLU(nn.Module):
    def __init__(self, channel, negative_slope=0.2, scale=2 ** 0.5, device='cpu'):
        super().__init__()

        self.bias = nn.Parameter(torch.zeros(channel))
        self.negative_slope = negative_slope
        self.scale = scale
        self.device = device

    def forward(self, input):
        return fused_leaky_relu(input, self.bias, self.negative_slope, self.scale, self.device)


def fused_leaky_relu(input, bias, negative_slope=0.2, scale=2 ** 0.5, device='cpu'):
    if platform.system() == 'Linux' and torch.cuda.is_available() and device != 'cpu':
        return FusedLeakyReLUFunction.apply(input, bias, negative_slope, scale)
    else:
        return scale * F.leaky_relu(input + bias.view((1, -1)+(1,)*(len(input.shape)-2)), negative_slope=negative_slope)
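Note: the non-CUDA branch of fused_leaky_relu is intended to be equivalent to the fused kernel: add the per-channel bias, apply leaky ReLU, then multiply by scale (sqrt(2) by default). A small sanity sketch, not part of the committed files; with device='cpu' only the fallback branch runs.

# Check that the CPU fallback is scale * leaky_relu(x + bias), broadcast over channels.
import torch
import torch.nn.functional as F

x = torch.randn(2, 8, 4, 4)
bias = torch.randn(8)
scale = 2 ** 0.5
ref = scale * F.leaky_relu(x + bias.view(1, -1, 1, 1), negative_slope=0.2)
out = fused_leaky_relu(x, bias, device='cpu')   # takes the else-branch above
print(torch.allclose(out, ref))                 # True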
third_party/GPEN/face_model/op/fused_bias_act.cpp
ADDED
|
@@ -0,0 +1,21 @@
#include <torch/extension.h>


torch::Tensor fused_bias_act_op(const torch::Tensor& input, const torch::Tensor& bias, const torch::Tensor& refer,
    int act, int grad, float alpha, float scale);

#define CHECK_CUDA(x) TORCH_CHECK(x.type().is_cuda(), #x " must be a CUDA tensor")
#define CHECK_CONTIGUOUS(x) TORCH_CHECK(x.is_contiguous(), #x " must be contiguous")
#define CHECK_INPUT(x) CHECK_CUDA(x); CHECK_CONTIGUOUS(x)

torch::Tensor fused_bias_act(const torch::Tensor& input, const torch::Tensor& bias, const torch::Tensor& refer,
    int act, int grad, float alpha, float scale) {
    CHECK_CUDA(input);
    CHECK_CUDA(bias);

    return fused_bias_act_op(input, bias, refer, act, grad, alpha, scale);
}

PYBIND11_MODULE(TORCH_EXTENSION_NAME, m) {
    m.def("fused_bias_act", &fused_bias_act, "fused bias act (CUDA)");
}
third_party/GPEN/face_model/op/fused_bias_act_kernel.cu
ADDED
|
@@ -0,0 +1,99 @@
| 1 |
+
// Copyright (c) 2019, NVIDIA Corporation. All rights reserved.
|
| 2 |
+
//
|
| 3 |
+
// This work is made available under the Nvidia Source Code License-NC.
|
| 4 |
+
// To view a copy of this license, visit
|
| 5 |
+
// https://nvlabs.github.io/stylegan2/license.html
|
| 6 |
+
|
| 7 |
+
#include <torch/types.h>
|
| 8 |
+
|
| 9 |
+
#include <ATen/ATen.h>
|
| 10 |
+
#include <ATen/AccumulateType.h>
|
| 11 |
+
#include <ATen/cuda/CUDAContext.h>
|
| 12 |
+
#include <ATen/cuda/CUDAApplyUtils.cuh>
|
| 13 |
+
|
| 14 |
+
#include <cuda.h>
|
| 15 |
+
#include <cuda_runtime.h>
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
template <typename scalar_t>
|
| 19 |
+
static __global__ void fused_bias_act_kernel(scalar_t* out, const scalar_t* p_x, const scalar_t* p_b, const scalar_t* p_ref,
|
| 20 |
+
int act, int grad, scalar_t alpha, scalar_t scale, int loop_x, int size_x, int step_b, int size_b, int use_bias, int use_ref) {
|
| 21 |
+
int xi = blockIdx.x * loop_x * blockDim.x + threadIdx.x;
|
| 22 |
+
|
| 23 |
+
scalar_t zero = 0.0;
|
| 24 |
+
|
| 25 |
+
for (int loop_idx = 0; loop_idx < loop_x && xi < size_x; loop_idx++, xi += blockDim.x) {
|
| 26 |
+
scalar_t x = p_x[xi];
|
| 27 |
+
|
| 28 |
+
if (use_bias) {
|
| 29 |
+
x += p_b[(xi / step_b) % size_b];
|
| 30 |
+
}
|
| 31 |
+
|
| 32 |
+
scalar_t ref = use_ref ? p_ref[xi] : zero;
|
| 33 |
+
|
| 34 |
+
scalar_t y;
|
| 35 |
+
|
| 36 |
+
switch (act * 10 + grad) {
|
| 37 |
+
default:
|
| 38 |
+
case 10: y = x; break;
|
| 39 |
+
case 11: y = x; break;
|
| 40 |
+
case 12: y = 0.0; break;
|
| 41 |
+
|
| 42 |
+
case 30: y = (x > 0.0) ? x : x * alpha; break;
|
| 43 |
+
case 31: y = (ref > 0.0) ? x : x * alpha; break;
|
| 44 |
+
case 32: y = 0.0; break;
|
| 45 |
+
}
|
| 46 |
+
|
| 47 |
+
out[xi] = y * scale;
|
| 48 |
+
}
|
| 49 |
+
}
|
| 50 |
+
|
| 51 |
+
|
| 52 |
+
torch::Tensor fused_bias_act_op(const torch::Tensor& input, const torch::Tensor& bias, const torch::Tensor& refer,
|
| 53 |
+
int act, int grad, float alpha, float scale) {
|
| 54 |
+
int curDevice = -1;
|
| 55 |
+
cudaGetDevice(&curDevice);
|
| 56 |
+
cudaStream_t stream = at::cuda::getCurrentCUDAStream(curDevice);
|
| 57 |
+
|
| 58 |
+
auto x = input.contiguous();
|
| 59 |
+
auto b = bias.contiguous();
|
| 60 |
+
auto ref = refer.contiguous();
|
| 61 |
+
|
| 62 |
+
int use_bias = b.numel() ? 1 : 0;
|
| 63 |
+
int use_ref = ref.numel() ? 1 : 0;
|
| 64 |
+
|
| 65 |
+
int size_x = x.numel();
|
| 66 |
+
int size_b = b.numel();
|
| 67 |
+
int step_b = 1;
|
| 68 |
+
|
| 69 |
+
for (int i = 1 + 1; i < x.dim(); i++) {
|
| 70 |
+
step_b *= x.size(i);
|
| 71 |
+
}
|
| 72 |
+
|
| 73 |
+
int loop_x = 4;
|
| 74 |
+
int block_size = 4 * 32;
|
| 75 |
+
int grid_size = (size_x - 1) / (loop_x * block_size) + 1;
|
| 76 |
+
|
| 77 |
+
auto y = torch::empty_like(x);
|
| 78 |
+
|
| 79 |
+
AT_DISPATCH_FLOATING_TYPES_AND_HALF(x.scalar_type(), "fused_bias_act_kernel", [&] {
|
| 80 |
+
fused_bias_act_kernel<scalar_t><<<grid_size, block_size, 0, stream>>>(
|
| 81 |
+
y.data_ptr<scalar_t>(),
|
| 82 |
+
x.data_ptr<scalar_t>(),
|
| 83 |
+
b.data_ptr<scalar_t>(),
|
| 84 |
+
ref.data_ptr<scalar_t>(),
|
| 85 |
+
act,
|
| 86 |
+
grad,
|
| 87 |
+
alpha,
|
| 88 |
+
scale,
|
| 89 |
+
loop_x,
|
| 90 |
+
size_x,
|
| 91 |
+
step_b,
|
| 92 |
+
size_b,
|
| 93 |
+
use_bias,
|
| 94 |
+
use_ref
|
| 95 |
+
);
|
| 96 |
+
});
|
| 97 |
+
|
| 98 |
+
return y;
|
| 99 |
+
}
|
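Note: in the kernel above, the switch on act * 10 + grad selects the activation (act=1 linear, act=3 leaky ReLU) and the derivative order (grad=0 forward, 1 first backward, 2 second backward); case 31 uses the sign of the saved forward output ref to apply the leaky-ReLU derivative. A rough PyTorch sketch of the case 30/31 semantics, not part of the committed files and only meant to illustrate the math.

# Plain-PyTorch equivalent of cases 30 (forward) and 31 (backward) for act=3.
import torch
alpha, scale = 0.2, 2 ** 0.5
x = torch.randn(5)
fwd = torch.where(x > 0, x, x * alpha) * scale                    # case 30
grad_out = torch.ones_like(x)
bwd = torch.where(fwd > 0, grad_out, grad_out * alpha) * scale    # case 31, ref = forward output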
third_party/GPEN/face_model/op/upfirdn2d.cpp
ADDED
|
@@ -0,0 +1,23 @@
#include <torch/extension.h>


torch::Tensor upfirdn2d_op(const torch::Tensor& input, const torch::Tensor& kernel,
                           int up_x, int up_y, int down_x, int down_y,
                           int pad_x0, int pad_x1, int pad_y0, int pad_y1);

#define CHECK_CUDA(x) TORCH_CHECK(x.type().is_cuda(), #x " must be a CUDA tensor")
#define CHECK_CONTIGUOUS(x) TORCH_CHECK(x.is_contiguous(), #x " must be contiguous")
#define CHECK_INPUT(x) CHECK_CUDA(x); CHECK_CONTIGUOUS(x)

torch::Tensor upfirdn2d(const torch::Tensor& input, const torch::Tensor& kernel,
                        int up_x, int up_y, int down_x, int down_y,
                        int pad_x0, int pad_x1, int pad_y0, int pad_y1) {
    CHECK_CUDA(input);
    CHECK_CUDA(kernel);

    return upfirdn2d_op(input, kernel, up_x, up_y, down_x, down_y, pad_x0, pad_x1, pad_y0, pad_y1);
}

PYBIND11_MODULE(TORCH_EXTENSION_NAME, m) {
    m.def("upfirdn2d", &upfirdn2d, "upfirdn2d (CUDA)");
}
third_party/GPEN/face_model/op/upfirdn2d.py
ADDED
|
@@ -0,0 +1,194 @@
import os
import platform

import torch
import torch.nn.functional as F
from torch.autograd import Function
from torch.utils.cpp_extension import load, _import_module_from_library

# if running GPEN without cuda, please comment line 10-18
if platform.system() == 'Linux' and torch.cuda.is_available():
    module_path = os.path.dirname(__file__)
    upfirdn2d_op = load(
        'upfirdn2d',
        sources=[
            os.path.join(module_path, 'upfirdn2d.cpp'),
            os.path.join(module_path, 'upfirdn2d_kernel.cu'),
        ],
    )


#upfirdn2d_op = _import_module_from_library('upfirdn2d', '/tmp/torch_extensions/upfirdn2d', True)

class UpFirDn2dBackward(Function):
    @staticmethod
    def forward(
        ctx, grad_output, kernel, grad_kernel, up, down, pad, g_pad, in_size, out_size
    ):

        up_x, up_y = up
        down_x, down_y = down
        g_pad_x0, g_pad_x1, g_pad_y0, g_pad_y1 = g_pad

        grad_output = grad_output.reshape(-1, out_size[0], out_size[1], 1)

        grad_input = upfirdn2d_op.upfirdn2d(
            grad_output,
            grad_kernel,
            down_x,
            down_y,
            up_x,
            up_y,
            g_pad_x0,
            g_pad_x1,
            g_pad_y0,
            g_pad_y1,
        )
        grad_input = grad_input.view(in_size[0], in_size[1], in_size[2], in_size[3])

        ctx.save_for_backward(kernel)

        pad_x0, pad_x1, pad_y0, pad_y1 = pad

        ctx.up_x = up_x
        ctx.up_y = up_y
        ctx.down_x = down_x
        ctx.down_y = down_y
        ctx.pad_x0 = pad_x0
        ctx.pad_x1 = pad_x1
        ctx.pad_y0 = pad_y0
        ctx.pad_y1 = pad_y1
        ctx.in_size = in_size
        ctx.out_size = out_size

        return grad_input

    @staticmethod
    def backward(ctx, gradgrad_input):
        kernel, = ctx.saved_tensors

        gradgrad_input = gradgrad_input.reshape(-1, ctx.in_size[2], ctx.in_size[3], 1)

        gradgrad_out = upfirdn2d_op.upfirdn2d(
            gradgrad_input,
            kernel,
            ctx.up_x,
            ctx.up_y,
            ctx.down_x,
            ctx.down_y,
            ctx.pad_x0,
            ctx.pad_x1,
            ctx.pad_y0,
            ctx.pad_y1,
        )
        # gradgrad_out = gradgrad_out.view(ctx.in_size[0], ctx.out_size[0], ctx.out_size[1], ctx.in_size[3])
        gradgrad_out = gradgrad_out.view(
            ctx.in_size[0], ctx.in_size[1], ctx.out_size[0], ctx.out_size[1]
        )

        return gradgrad_out, None, None, None, None, None, None, None, None


class UpFirDn2d(Function):
    @staticmethod
    def forward(ctx, input, kernel, up, down, pad):
        up_x, up_y = up
        down_x, down_y = down
        pad_x0, pad_x1, pad_y0, pad_y1 = pad

        kernel_h, kernel_w = kernel.shape
        batch, channel, in_h, in_w = input.shape
        ctx.in_size = input.shape

        input = input.reshape(-1, in_h, in_w, 1)

        ctx.save_for_backward(kernel, torch.flip(kernel, [0, 1]))

        out_h = (in_h * up_y + pad_y0 + pad_y1 - kernel_h) // down_y + 1
        out_w = (in_w * up_x + pad_x0 + pad_x1 - kernel_w) // down_x + 1
        ctx.out_size = (out_h, out_w)

        ctx.up = (up_x, up_y)
        ctx.down = (down_x, down_y)
        ctx.pad = (pad_x0, pad_x1, pad_y0, pad_y1)

        g_pad_x0 = kernel_w - pad_x0 - 1
        g_pad_y0 = kernel_h - pad_y0 - 1
        g_pad_x1 = in_w * up_x - out_w * down_x + pad_x0 - up_x + 1
        g_pad_y1 = in_h * up_y - out_h * down_y + pad_y0 - up_y + 1

        ctx.g_pad = (g_pad_x0, g_pad_x1, g_pad_y0, g_pad_y1)

        out = upfirdn2d_op.upfirdn2d(
            input, kernel, up_x, up_y, down_x, down_y, pad_x0, pad_x1, pad_y0, pad_y1
        )
        # out = out.view(major, out_h, out_w, minor)
        out = out.view(-1, channel, out_h, out_w)

        return out

    @staticmethod
    def backward(ctx, grad_output):
        kernel, grad_kernel = ctx.saved_tensors

        grad_input = UpFirDn2dBackward.apply(
            grad_output,
            kernel,
            grad_kernel,
            ctx.up,
            ctx.down,
            ctx.pad,
            ctx.g_pad,
            ctx.in_size,
            ctx.out_size,
        )

        return grad_input, None, None, None, None


def upfirdn2d(input, kernel, up=1, down=1, pad=(0, 0), device='cpu'):
    if platform.system() == 'Linux' and torch.cuda.is_available() and device != 'cpu':
        out = UpFirDn2d.apply(
            input, kernel, (up, up), (down, down), (pad[0], pad[1], pad[0], pad[1])
        )
    else:
        out = upfirdn2d_native(input, kernel, up, up, down, down, pad[0], pad[1], pad[0], pad[1])

    return out


def upfirdn2d_native(
    input, kernel, up_x, up_y, down_x, down_y, pad_x0, pad_x1, pad_y0, pad_y1
):
    input = input.permute(0, 2, 3, 1)
    _, in_h, in_w, minor = input.shape
    kernel_h, kernel_w = kernel.shape
    out = input.view(-1, in_h, 1, in_w, 1, minor)
    out = F.pad(out, [0, 0, 0, up_x - 1, 0, 0, 0, up_y - 1])
    out = out.view(-1, in_h * up_y, in_w * up_x, minor)

    out = F.pad(
        out, [0, 0, max(pad_x0, 0), max(pad_x1, 0), max(pad_y0, 0), max(pad_y1, 0)]
    )
    out = out[
        :,
        max(-pad_y0, 0) : out.shape[1] - max(-pad_y1, 0),
        max(-pad_x0, 0) : out.shape[2] - max(-pad_x1, 0),
        :,
    ]

    out = out.permute(0, 3, 1, 2)
    out = out.reshape(
        [-1, 1, in_h * up_y + pad_y0 + pad_y1, in_w * up_x + pad_x0 + pad_x1]
    )
    w = torch.flip(kernel, [0, 1]).view(1, 1, kernel_h, kernel_w)
    out = F.conv2d(out, w)
    out = out.reshape(
        -1,
        minor,
        in_h * up_y + pad_y0 + pad_y1 - kernel_h + 1,
        in_w * up_x + pad_x0 + pad_x1 - kernel_w + 1,
    )
    # out = out.permute(0, 2, 3, 1)
    return out[:, :, ::down_y, ::down_x]
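Note: upfirdn2d upsamples by zero insertion, pads, applies the FIR kernel, then downsamples by striding, so the output height follows (in_h * up + pad0 + pad1 - kernel_h) // down + 1. A small usage sketch, not part of the committed files; with device='cpu' the upfirdn2d_native fallback above runs.

# Use upfirdn2d as a simple FIR blur + 2x downsample with a [1,3,3,1] kernel.
import torch

k1d = torch.tensor([1., 3., 3., 1.])
kernel = k1d[:, None] * k1d[None, :]
kernel = kernel / kernel.sum()

x = torch.randn(1, 3, 64, 64)
pad = (1, 2)                                   # (64 + 1 + 2 - 4) // 2 + 1 == 32
y = upfirdn2d(x, kernel, up=1, down=2, pad=pad, device='cpu')
print(y.shape)                                 # torch.Size([1, 3, 32, 32])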
third_party/GPEN/face_model/op/upfirdn2d_kernel.cu
ADDED
|
@@ -0,0 +1,272 @@
| 1 |
+
// Copyright (c) 2019, NVIDIA Corporation. All rights reserved.
|
| 2 |
+
//
|
| 3 |
+
// This work is made available under the Nvidia Source Code License-NC.
|
| 4 |
+
// To view a copy of this license, visit
|
| 5 |
+
// https://nvlabs.github.io/stylegan2/license.html
|
| 6 |
+
|
| 7 |
+
#include <torch/types.h>
|
| 8 |
+
|
| 9 |
+
#include <ATen/ATen.h>
|
| 10 |
+
#include <ATen/AccumulateType.h>
|
| 11 |
+
#include <ATen/cuda/CUDAContext.h>
|
| 12 |
+
#include <ATen/cuda/CUDAApplyUtils.cuh>
|
| 13 |
+
|
| 14 |
+
#include <cuda.h>
|
| 15 |
+
#include <cuda_runtime.h>
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
static __host__ __device__ __forceinline__ int floor_div(int a, int b) {
|
| 19 |
+
int c = a / b;
|
| 20 |
+
|
| 21 |
+
if (c * b > a) {
|
| 22 |
+
c--;
|
| 23 |
+
}
|
| 24 |
+
|
| 25 |
+
return c;
|
| 26 |
+
}
|
| 27 |
+
|
| 28 |
+
|
| 29 |
+
struct UpFirDn2DKernelParams {
|
| 30 |
+
int up_x;
|
| 31 |
+
int up_y;
|
| 32 |
+
int down_x;
|
| 33 |
+
int down_y;
|
| 34 |
+
int pad_x0;
|
| 35 |
+
int pad_x1;
|
| 36 |
+
int pad_y0;
|
| 37 |
+
int pad_y1;
|
| 38 |
+
|
| 39 |
+
int major_dim;
|
| 40 |
+
int in_h;
|
| 41 |
+
int in_w;
|
| 42 |
+
int minor_dim;
|
| 43 |
+
int kernel_h;
|
| 44 |
+
int kernel_w;
|
| 45 |
+
int out_h;
|
| 46 |
+
int out_w;
|
| 47 |
+
int loop_major;
|
| 48 |
+
int loop_x;
|
| 49 |
+
};
|
| 50 |
+
|
| 51 |
+
|
| 52 |
+
template <typename scalar_t, int up_x, int up_y, int down_x, int down_y, int kernel_h, int kernel_w, int tile_out_h, int tile_out_w>
|
| 53 |
+
__global__ void upfirdn2d_kernel(scalar_t* out, const scalar_t* input, const scalar_t* kernel, const UpFirDn2DKernelParams p) {
|
| 54 |
+
const int tile_in_h = ((tile_out_h - 1) * down_y + kernel_h - 1) / up_y + 1;
|
| 55 |
+
const int tile_in_w = ((tile_out_w - 1) * down_x + kernel_w - 1) / up_x + 1;
|
| 56 |
+
|
| 57 |
+
__shared__ volatile float sk[kernel_h][kernel_w];
|
| 58 |
+
__shared__ volatile float sx[tile_in_h][tile_in_w];
|
| 59 |
+
|
| 60 |
+
int minor_idx = blockIdx.x;
|
| 61 |
+
int tile_out_y = minor_idx / p.minor_dim;
|
| 62 |
+
minor_idx -= tile_out_y * p.minor_dim;
|
| 63 |
+
tile_out_y *= tile_out_h;
|
| 64 |
+
int tile_out_x_base = blockIdx.y * p.loop_x * tile_out_w;
|
| 65 |
+
int major_idx_base = blockIdx.z * p.loop_major;
|
| 66 |
+
|
| 67 |
+
if (tile_out_x_base >= p.out_w | tile_out_y >= p.out_h | major_idx_base >= p.major_dim) {
|
| 68 |
+
return;
|
| 69 |
+
}
|
| 70 |
+
|
| 71 |
+
for (int tap_idx = threadIdx.x; tap_idx < kernel_h * kernel_w; tap_idx += blockDim.x) {
|
| 72 |
+
int ky = tap_idx / kernel_w;
|
| 73 |
+
int kx = tap_idx - ky * kernel_w;
|
| 74 |
+
scalar_t v = 0.0;
|
| 75 |
+
|
| 76 |
+
if (kx < p.kernel_w & ky < p.kernel_h) {
|
| 77 |
+
v = kernel[(p.kernel_h - 1 - ky) * p.kernel_w + (p.kernel_w - 1 - kx)];
|
| 78 |
+
}
|
| 79 |
+
|
| 80 |
+
sk[ky][kx] = v;
|
| 81 |
+
}
|
| 82 |
+
|
| 83 |
+
for (int loop_major = 0, major_idx = major_idx_base; loop_major < p.loop_major & major_idx < p.major_dim; loop_major++, major_idx++) {
|
| 84 |
+
for (int loop_x = 0, tile_out_x = tile_out_x_base; loop_x < p.loop_x & tile_out_x < p.out_w; loop_x++, tile_out_x += tile_out_w) {
|
| 85 |
+
int tile_mid_x = tile_out_x * down_x + up_x - 1 - p.pad_x0;
|
| 86 |
+
int tile_mid_y = tile_out_y * down_y + up_y - 1 - p.pad_y0;
|
| 87 |
+
int tile_in_x = floor_div(tile_mid_x, up_x);
|
| 88 |
+
int tile_in_y = floor_div(tile_mid_y, up_y);
|
| 89 |
+
|
| 90 |
+
__syncthreads();
|
| 91 |
+
|
| 92 |
+
for (int in_idx = threadIdx.x; in_idx < tile_in_h * tile_in_w; in_idx += blockDim.x) {
|
| 93 |
+
int rel_in_y = in_idx / tile_in_w;
|
| 94 |
+
int rel_in_x = in_idx - rel_in_y * tile_in_w;
|
| 95 |
+
int in_x = rel_in_x + tile_in_x;
|
| 96 |
+
int in_y = rel_in_y + tile_in_y;
|
| 97 |
+
|
| 98 |
+
scalar_t v = 0.0;
|
| 99 |
+
|
| 100 |
+
if (in_x >= 0 & in_y >= 0 & in_x < p.in_w & in_y < p.in_h) {
|
| 101 |
+
v = input[((major_idx * p.in_h + in_y) * p.in_w + in_x) * p.minor_dim + minor_idx];
|
| 102 |
+
}
|
| 103 |
+
|
| 104 |
+
sx[rel_in_y][rel_in_x] = v;
|
| 105 |
+
}
|
| 106 |
+
|
| 107 |
+
__syncthreads();
|
| 108 |
+
for (int out_idx = threadIdx.x; out_idx < tile_out_h * tile_out_w; out_idx += blockDim.x) {
|
| 109 |
+
int rel_out_y = out_idx / tile_out_w;
|
| 110 |
+
int rel_out_x = out_idx - rel_out_y * tile_out_w;
|
| 111 |
+
int out_x = rel_out_x + tile_out_x;
|
| 112 |
+
int out_y = rel_out_y + tile_out_y;
|
| 113 |
+
|
| 114 |
+
int mid_x = tile_mid_x + rel_out_x * down_x;
|
| 115 |
+
int mid_y = tile_mid_y + rel_out_y * down_y;
|
| 116 |
+
int in_x = floor_div(mid_x, up_x);
|
| 117 |
+
int in_y = floor_div(mid_y, up_y);
|
| 118 |
+
int rel_in_x = in_x - tile_in_x;
|
| 119 |
+
int rel_in_y = in_y - tile_in_y;
|
| 120 |
+
int kernel_x = (in_x + 1) * up_x - mid_x - 1;
|
| 121 |
+
int kernel_y = (in_y + 1) * up_y - mid_y - 1;
|
| 122 |
+
|
| 123 |
+
scalar_t v = 0.0;
|
| 124 |
+
|
| 125 |
+
#pragma unroll
|
| 126 |
+
for (int y = 0; y < kernel_h / up_y; y++)
|
| 127 |
+
#pragma unroll
|
| 128 |
+
for (int x = 0; x < kernel_w / up_x; x++)
|
| 129 |
+
v += sx[rel_in_y + y][rel_in_x + x] * sk[kernel_y + y * up_y][kernel_x + x * up_x];
|
| 130 |
+
|
| 131 |
+
if (out_x < p.out_w & out_y < p.out_h) {
|
| 132 |
+
out[((major_idx * p.out_h + out_y) * p.out_w + out_x) * p.minor_dim + minor_idx] = v;
|
| 133 |
+
}
|
| 134 |
+
}
|
| 135 |
+
}
|
| 136 |
+
}
|
| 137 |
+
}
|
| 138 |
+
|
| 139 |
+
|
| 140 |
+
torch::Tensor upfirdn2d_op(const torch::Tensor& input, const torch::Tensor& kernel,
|
| 141 |
+
int up_x, int up_y, int down_x, int down_y,
|
| 142 |
+
int pad_x0, int pad_x1, int pad_y0, int pad_y1) {
|
| 143 |
+
int curDevice = -1;
|
| 144 |
+
cudaGetDevice(&curDevice);
|
| 145 |
+
cudaStream_t stream = at::cuda::getCurrentCUDAStream(curDevice);
|
| 146 |
+
|
| 147 |
+
UpFirDn2DKernelParams p;
|
| 148 |
+
|
| 149 |
+
auto x = input.contiguous();
|
| 150 |
+
auto k = kernel.contiguous();
|
| 151 |
+
|
| 152 |
+
p.major_dim = x.size(0);
|
| 153 |
+
p.in_h = x.size(1);
|
| 154 |
+
p.in_w = x.size(2);
|
| 155 |
+
p.minor_dim = x.size(3);
|
| 156 |
+
p.kernel_h = k.size(0);
|
| 157 |
+
p.kernel_w = k.size(1);
|
| 158 |
+
p.up_x = up_x;
|
| 159 |
+
p.up_y = up_y;
|
| 160 |
+
p.down_x = down_x;
|
| 161 |
+
p.down_y = down_y;
|
| 162 |
+
p.pad_x0 = pad_x0;
|
| 163 |
+
p.pad_x1 = pad_x1;
|
| 164 |
+
p.pad_y0 = pad_y0;
|
| 165 |
+
p.pad_y1 = pad_y1;
|
| 166 |
+
|
| 167 |
+
p.out_h = (p.in_h * p.up_y + p.pad_y0 + p.pad_y1 - p.kernel_h + p.down_y) / p.down_y;
|
| 168 |
+
p.out_w = (p.in_w * p.up_x + p.pad_x0 + p.pad_x1 - p.kernel_w + p.down_x) / p.down_x;
|
| 169 |
+
|
| 170 |
+
auto out = at::empty({p.major_dim, p.out_h, p.out_w, p.minor_dim}, x.options());
|
| 171 |
+
|
| 172 |
+
int mode = -1;
|
| 173 |
+
|
| 174 |
+
int tile_out_h;
|
| 175 |
+
int tile_out_w;
|
| 176 |
+
|
| 177 |
+
if (p.up_x == 1 && p.up_y == 1 && p.down_x == 1 && p.down_y == 1 && p.kernel_h <= 4 && p.kernel_w <= 4) {
|
| 178 |
+
mode = 1;
|
| 179 |
+
tile_out_h = 16;
|
| 180 |
+
tile_out_w = 64;
|
| 181 |
+
}
|
| 182 |
+
|
| 183 |
+
if (p.up_x == 1 && p.up_y == 1 && p.down_x == 1 && p.down_y == 1 && p.kernel_h <= 3 && p.kernel_w <= 3) {
|
| 184 |
+
mode = 2;
|
| 185 |
+
tile_out_h = 16;
|
| 186 |
+
tile_out_w = 64;
|
| 187 |
+
}
|
| 188 |
+
|
| 189 |
+
if (p.up_x == 2 && p.up_y == 2 && p.down_x == 1 && p.down_y == 1 && p.kernel_h <= 4 && p.kernel_w <= 4) {
|
| 190 |
+
mode = 3;
|
| 191 |
+
tile_out_h = 16;
|
| 192 |
+
tile_out_w = 64;
|
| 193 |
+
}
|
| 194 |
+
|
| 195 |
+
if (p.up_x == 2 && p.up_y == 2 && p.down_x == 1 && p.down_y == 1 && p.kernel_h <= 2 && p.kernel_w <= 2) {
|
| 196 |
+
mode = 4;
|
| 197 |
+
tile_out_h = 16;
|
| 198 |
+
tile_out_w = 64;
|
| 199 |
+
}
|
| 200 |
+
|
| 201 |
+
if (p.up_x == 1 && p.up_y == 1 && p.down_x == 2 && p.down_y == 2 && p.kernel_h <= 4 && p.kernel_w <= 4) {
|
| 202 |
+
mode = 5;
|
| 203 |
+
tile_out_h = 8;
|
| 204 |
+
tile_out_w = 32;
|
| 205 |
+
}
|
| 206 |
+
|
| 207 |
+
if (p.up_x == 1 && p.up_y == 1 && p.down_x == 2 && p.down_y == 2 && p.kernel_h <= 2 && p.kernel_w <= 2) {
|
| 208 |
+
mode = 6;
|
| 209 |
+
tile_out_h = 8;
|
| 210 |
+
tile_out_w = 32;
|
| 211 |
+
}
|
| 212 |
+
|
| 213 |
+
dim3 block_size;
|
| 214 |
+
dim3 grid_size;
|
| 215 |
+
|
| 216 |
+
if (tile_out_h > 0 && tile_out_w) {
|
| 217 |
+
p.loop_major = (p.major_dim - 1) / 16384 + 1;
|
| 218 |
+
p.loop_x = 1;
|
| 219 |
+
block_size = dim3(32 * 8, 1, 1);
|
| 220 |
+
grid_size = dim3(((p.out_h - 1) / tile_out_h + 1) * p.minor_dim,
|
| 221 |
+
(p.out_w - 1) / (p.loop_x * tile_out_w) + 1,
|
| 222 |
+
(p.major_dim - 1) / p.loop_major + 1);
|
| 223 |
+
}
|
| 224 |
+
|
| 225 |
+
AT_DISPATCH_FLOATING_TYPES_AND_HALF(x.scalar_type(), "upfirdn2d_cuda", [&] {
|
| 226 |
+
switch (mode) {
|
| 227 |
+
case 1:
|
| 228 |
+
upfirdn2d_kernel<scalar_t, 1, 1, 1, 1, 4, 4, 16, 64><<<grid_size, block_size, 0, stream>>>(
|
| 229 |
+
out.data_ptr<scalar_t>(), x.data_ptr<scalar_t>(), k.data_ptr<scalar_t>(), p
|
| 230 |
+
);
|
| 231 |
+
|
| 232 |
+
break;
|
| 233 |
+
|
| 234 |
+
case 2:
|
| 235 |
+
upfirdn2d_kernel<scalar_t, 1, 1, 1, 1, 3, 3, 16, 64><<<grid_size, block_size, 0, stream>>>(
|
| 236 |
+
out.data_ptr<scalar_t>(), x.data_ptr<scalar_t>(), k.data_ptr<scalar_t>(), p
|
| 237 |
+
);
|
| 238 |
+
|
| 239 |
+
break;
|
| 240 |
+
|
| 241 |
+
case 3:
|
| 242 |
+
upfirdn2d_kernel<scalar_t, 2, 2, 1, 1, 4, 4, 16, 64><<<grid_size, block_size, 0, stream>>>(
|
| 243 |
+
out.data_ptr<scalar_t>(), x.data_ptr<scalar_t>(), k.data_ptr<scalar_t>(), p
|
| 244 |
+
);
|
| 245 |
+
|
| 246 |
+
break;
|
| 247 |
+
|
| 248 |
+
case 4:
|
| 249 |
+
upfirdn2d_kernel<scalar_t, 2, 2, 1, 1, 2, 2, 16, 64><<<grid_size, block_size, 0, stream>>>(
|
| 250 |
+
out.data_ptr<scalar_t>(), x.data_ptr<scalar_t>(), k.data_ptr<scalar_t>(), p
|
| 251 |
+
);
|
| 252 |
+
|
| 253 |
+
break;
|
| 254 |
+
|
| 255 |
+
case 5:
|
| 256 |
+
upfirdn2d_kernel<scalar_t, 1, 1, 2, 2, 4, 4, 8, 32><<<grid_size, block_size, 0, stream>>>(
|
| 257 |
+
out.data_ptr<scalar_t>(), x.data_ptr<scalar_t>(), k.data_ptr<scalar_t>(), p
|
| 258 |
+
);
|
| 259 |
+
|
| 260 |
+
break;
|
| 261 |
+
|
| 262 |
+
case 6:
|
| 263 |
+
upfirdn2d_kernel<scalar_t, 1, 1, 2, 2, 4, 4, 8, 32><<<grid_size, block_size, 0, stream>>>(
|
| 264 |
+
out.data_ptr<scalar_t>(), x.data_ptr<scalar_t>(), k.data_ptr<scalar_t>(), p
|
| 265 |
+
);
|
| 266 |
+
|
| 267 |
+
break;
|
| 268 |
+
}
|
| 269 |
+
});
|
| 270 |
+
|
| 271 |
+
return out;
|
| 272 |
+
}
|
third_party/GPEN/face_parse/blocks.py
ADDED
|
@@ -0,0 +1,127 @@
# -*- coding: utf-8 -*-
import torch
import torch.nn as nn
from torch.nn.parameter import Parameter
from torch.nn import functional as F
import numpy as np

class NormLayer(nn.Module):
    """Normalization Layers.
    ------------
    # Arguments
        - channels: input channels, for batch norm and instance norm.
        - input_size: input shape without batch size, for layer norm.
    """
    def __init__(self, channels, normalize_shape=None, norm_type='bn', ref_channels=None):
        super(NormLayer, self).__init__()
        norm_type = norm_type.lower()
        self.norm_type = norm_type
        if norm_type == 'bn':
            self.norm = nn.BatchNorm2d(channels, affine=True)
        elif norm_type == 'in':
            self.norm = nn.InstanceNorm2d(channels, affine=False)
        elif norm_type == 'gn':
            self.norm = nn.GroupNorm(32, channels, affine=True)
        elif norm_type == 'pixel':
            self.norm = lambda x: F.normalize(x, p=2, dim=1)
        elif norm_type == 'layer':
            self.norm = nn.LayerNorm(normalize_shape)
        elif norm_type == 'none':
            self.norm = lambda x: x*1.0
        else:
            assert 1==0, 'Norm type {} not supported.'.format(norm_type)

    def forward(self, x, ref=None):
        if self.norm_type == 'spade':
            return self.norm(x, ref)
        else:
            return self.norm(x)


class ReluLayer(nn.Module):
    """Relu Layer.
    ------------
    # Arguments
        - relu type: type of relu layer, candidates are
            - ReLU
            - LeakyReLU: default relu slope 0.2
            - PRelu
            - SELU
            - none: direct pass
    """
    def __init__(self, channels, relu_type='relu'):
        super(ReluLayer, self).__init__()
        relu_type = relu_type.lower()
        if relu_type == 'relu':
            self.func = nn.ReLU(True)
        elif relu_type == 'leakyrelu':
            self.func = nn.LeakyReLU(0.2, inplace=True)
        elif relu_type == 'prelu':
            self.func = nn.PReLU(channels)
        elif relu_type == 'selu':
            self.func = nn.SELU(True)
        elif relu_type == 'none':
            self.func = lambda x: x*1.0
        else:
            assert 1==0, 'Relu type {} not supported.'.format(relu_type)

    def forward(self, x):
        return self.func(x)


class ConvLayer(nn.Module):
    def __init__(self, in_channels, out_channels, kernel_size=3, scale='none', norm_type='none', relu_type='none', use_pad=True, bias=True):
        super(ConvLayer, self).__init__()
        self.use_pad = use_pad
        self.norm_type = norm_type
        if norm_type in ['bn']:
            bias = False

        stride = 2 if scale == 'down' else 1

        self.scale_func = lambda x: x
        if scale == 'up':
            self.scale_func = lambda x: nn.functional.interpolate(x, scale_factor=2, mode='nearest')

        self.reflection_pad = nn.ReflectionPad2d(int(np.ceil((kernel_size - 1.)/2)))
        self.conv2d = nn.Conv2d(in_channels, out_channels, kernel_size, stride, bias=bias)

        self.relu = ReluLayer(out_channels, relu_type)
        self.norm = NormLayer(out_channels, norm_type=norm_type)

    def forward(self, x):
        out = self.scale_func(x)
        if self.use_pad:
            out = self.reflection_pad(out)
        out = self.conv2d(out)
        out = self.norm(out)
        out = self.relu(out)
        return out


class ResidualBlock(nn.Module):
    """
    Residual block recommended in: http://torch.ch/blog/2016/02/04/resnets.html
    """
    def __init__(self, c_in, c_out, relu_type='prelu', norm_type='bn', scale='none'):
        super(ResidualBlock, self).__init__()

        if scale == 'none' and c_in == c_out:
            self.shortcut_func = lambda x: x
        else:
            self.shortcut_func = ConvLayer(c_in, c_out, 3, scale)

        scale_config_dict = {'down': ['none', 'down'], 'up': ['up', 'none'], 'none': ['none', 'none']}
        scale_conf = scale_config_dict[scale]

        self.conv1 = ConvLayer(c_in, c_out, 3, scale_conf[0], norm_type=norm_type, relu_type=relu_type)
        self.conv2 = ConvLayer(c_out, c_out, 3, scale_conf[1], norm_type=norm_type, relu_type='none')

    def forward(self, x):
        identity = self.shortcut_func(x)

        res = self.conv1(x)
        res = self.conv2(res)
        return identity + res
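Note: ConvLayer handles resampling through its scale argument ('down' uses stride 2, 'up' interpolates before the conv), and ResidualBlock chains two ConvLayers with a learned shortcut when shape changes. A brief shape sketch, not part of the committed files, assuming the classes above are importable.

# Shape check for the down/up residual blocks defined above.
import torch

x = torch.randn(1, 32, 64, 64)
down = ResidualBlock(32, 64, relu_type='leakyrelu', norm_type='bn', scale='down')
up = ResidualBlock(64, 32, relu_type='leakyrelu', norm_type='bn', scale='up')
print(down(x).shape)        # torch.Size([1, 64, 32, 32])
print(up(down(x)).shape)    # torch.Size([1, 32, 64, 64])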
third_party/GPEN/face_parse/face_parsing.py
ADDED
|
@@ -0,0 +1,78 @@
'''
@paper: GAN Prior Embedded Network for Blind Face Restoration in the Wild (CVPR2021)
@author: yangxy (yangtao9009@gmail.com)
'''
import os
import cv2
import torch
import numpy as np
from parse_model import ParseNet
import torch.nn.functional as F

class FaceParse(object):
    def __init__(self, base_dir='./', model='ParseNet-latest', device='cuda'):
        self.mfile = os.path.join(base_dir, 'weights', model+'.pth')
        self.size = 512
        self.device = device

        '''
        0: 'background'  1: 'skin'    2: 'nose'
        3: 'eye_g'       4: 'l_eye'   5: 'r_eye'
        6: 'l_brow'      7: 'r_brow'  8: 'l_ear'
        9: 'r_ear'      10: 'mouth'  11: 'u_lip'
        12: 'l_lip'     13: 'hair'   14: 'hat'
        15: 'ear_r'     16: 'neck_l' 17: 'neck'
        18: 'cloth'
        '''
        #self.MASK_COLORMAP = [[0, 0, 0], [204, 0, 0], [76, 153, 0], [204, 204, 0], [51, 51, 255], [204, 0, 204], [0, 255, 255], [255, 204, 204], [102, 51, 0], [255, 0, 0], [102, 204, 0], [255, 255, 0], [0, 0, 153], [0, 0, 204], [255, 51, 153], [0, 204, 204], [0, 51, 0], [255, 153, 51], [0, 204, 0]]
        #self.#MASK_COLORMAP = [[0, 0, 0], [204, 0, 0], [76, 153, 0], [204, 204, 0], [51, 51, 255], [204, 0, 204], [0, 255, 255], [255, 204, 204], [102, 51, 0], [255, 0, 0], [102, 204, 0], [255, 255, 0], [0, 0, 153], [0, 0, 204], [255, 51, 153], [0, 204, 204], [0, 51, 0], [255, 153, 51], [0, 204, 0]] = [[0, 0, 0], [204, 0, 0], [76, 153, 0], [204, 204, 0], [51, 51, 255], [204, 0, 204], [0, 255, 255], [255, 204, 204], [102, 51, 0], [255, 0, 0], [102, 204, 0], [255, 255, 0], [0, 0, 153], [0, 0, 204], [255, 51, 153], [0, 204, 204], [0, 51, 0], [0, 0, 0], [0, 0, 0]]
        self.MASK_COLORMAP = [0, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 0, 255, 0, 0, 0]
        self.load_model()

    def load_model(self):
        self.faceparse = ParseNet(self.size, self.size, 32, 64, 19, norm_type='bn', relu_type='LeakyReLU', ch_range=[32, 256])
        self.faceparse.load_state_dict(torch.load(self.mfile, map_location=torch.device('cpu')))
        self.faceparse.to(self.device)
        self.faceparse.eval()

    def process(self, im):
        im = cv2.resize(im, (self.size, self.size))
        imt = self.img2tensor(im)
        pred_mask, sr_img_tensor = self.faceparse(imt)
        mask = self.tenor2mask(pred_mask)

        return mask

    def process_tensor(self, imt):
        imt = F.interpolate(imt.flip(1)*2-1, (self.size, self.size))
        pred_mask, sr_img_tensor = self.faceparse(imt)

        mask = pred_mask.argmax(dim=1)
        for idx, color in enumerate(self.MASK_COLORMAP):
            mask = torch.where(mask==idx, color, mask)
        #mask = mask.repeat(3, 1, 1).unsqueeze(0) #.cpu().float().numpy()
        mask = mask.unsqueeze(0)

        return mask

    def img2tensor(self, img):
        img = img[..., ::-1]
        img = img / 255. * 2 - 1
        img_tensor = torch.from_numpy(img.transpose(2, 0, 1)).unsqueeze(0).to(self.device)
        return img_tensor.float()

    def tenor2mask(self, tensor):
        if len(tensor.shape) < 4:
            tensor = tensor.unsqueeze(0)
        if tensor.shape[1] > 1:
            tensor = tensor.argmax(dim=1)

        tensor = tensor.squeeze(1).data.cpu().numpy()
        color_maps = []
        for t in tensor:
            #tmp_img = np.zeros(tensor.shape[1:] + (3,))
            tmp_img = np.zeros(tensor.shape[1:])
            for idx, color in enumerate(self.MASK_COLORMAP):
                tmp_img[t == idx] = color
            color_maps.append(tmp_img.astype(np.uint8))
        return color_maps
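Note: FaceParse.process takes an aligned BGR uint8 face crop, resizes it to 512, and returns a single-channel mask where the classes marked 255 in MASK_COLORMAP (skin, eyes, nose, lips, ...) are kept and background/hair/cloth are zeroed. A usage sketch, not part of the committed files; it assumes weights/ParseNet-latest.pth exists under base_dir, and the input path is hypothetical.

# Produce a 512x512 face-region mask for an aligned face crop.
import cv2

face_parser = FaceParse(base_dir='./', device='cpu')
face = cv2.imread('aligned_face.png', cv2.IMREAD_COLOR)   # hypothetical aligned crop, BGR uint8
masks = face_parser.process(face)                         # list with one (512, 512) uint8 mask
cv2.imwrite('face_mask.png', masks[0])                    # 255 = face region, 0 = ignored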
third_party/GPEN/face_parse/mask.png
ADDED
|
third_party/GPEN/face_parse/parse_model.py
ADDED
|
@@ -0,0 +1,77 @@
'''
@Created by chaofengc (chaofenghust@gmail.com)

@Modified by yangxy (yangtao9009@gmail.com)
'''

from blocks import *
import torch
from torch import nn
import numpy as np

def define_P(in_size=512, out_size=512, min_feat_size=32, relu_type='LeakyReLU', isTrain=False, weight_path=None):
    net = ParseNet(in_size, out_size, min_feat_size, 64, 19, norm_type='bn', relu_type=relu_type, ch_range=[32, 256])
    if not isTrain:
        net.eval()
    if weight_path is not None:
        net.load_state_dict(torch.load(weight_path))
    return net


class ParseNet(nn.Module):
    def __init__(self,
                 in_size=128,
                 out_size=128,
                 min_feat_size=32,
                 base_ch=64,
                 parsing_ch=19,
                 res_depth=10,
                 relu_type='prelu',
                 norm_type='bn',
                 ch_range=[32, 512],
                 ):
        super().__init__()
        self.res_depth = res_depth
        act_args = {'norm_type': norm_type, 'relu_type': relu_type}
        min_ch, max_ch = ch_range

        ch_clip = lambda x: max(min_ch, min(x, max_ch))
        min_feat_size = min(in_size, min_feat_size)

        down_steps = int(np.log2(in_size//min_feat_size))
        up_steps = int(np.log2(out_size//min_feat_size))

        # =============== define encoder-body-decoder ====================
        self.encoder = []
        self.encoder.append(ConvLayer(3, base_ch, 3, 1))
        head_ch = base_ch
        for i in range(down_steps):
            cin, cout = ch_clip(head_ch), ch_clip(head_ch * 2)
            self.encoder.append(ResidualBlock(cin, cout, scale='down', **act_args))
            head_ch = head_ch * 2

        self.body = []
        for i in range(res_depth):
            self.body.append(ResidualBlock(ch_clip(head_ch), ch_clip(head_ch), **act_args))

        self.decoder = []
        for i in range(up_steps):
            cin, cout = ch_clip(head_ch), ch_clip(head_ch // 2)
            self.decoder.append(ResidualBlock(cin, cout, scale='up', **act_args))
            head_ch = head_ch // 2

        self.encoder = nn.Sequential(*self.encoder)
        self.body = nn.Sequential(*self.body)
        self.decoder = nn.Sequential(*self.decoder)
        self.out_img_conv = ConvLayer(ch_clip(head_ch), 3)
        self.out_mask_conv = ConvLayer(ch_clip(head_ch), parsing_ch)

    def forward(self, x):
        feat = self.encoder(x)
        x = feat + self.body(feat)
        x = self.decoder(x)
        out_img = self.out_img_conv(x)
        out_mask = self.out_mask_conv(x)
        return out_mask, out_img
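Note: ParseNet is an encoder/body/decoder hourglass that returns both a 19-channel parsing map and a reconstructed image; define_P builds the 512-in/512-out variant used by FaceParse above. A shape sketch, not part of the committed files, run on CPU with random weights (real use loads ParseNet-latest.pth).

# Forward-shape check for ParseNet with the configuration used in face_parsing.py.
import torch

net = ParseNet(in_size=512, out_size=512, min_feat_size=32, base_ch=64, parsing_ch=19,
               norm_type='bn', relu_type='LeakyReLU', ch_range=[32, 256])
net.eval()
x = torch.randn(1, 3, 512, 512)
with torch.no_grad():
    out_mask, out_img = net(x)
print(out_mask.shape, out_img.shape)   # torch.Size([1, 19, 512, 512]) torch.Size([1, 3, 512, 512])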
third_party/GPEN/face_parse/test.png
ADDED
|
third_party/GPEN/infer_image.py
ADDED
|
@@ -0,0 +1,116 @@
import os
import cv2
import numpy as np
from PIL import Image
import glob

import torch
import tqdm
import shutil
import argparse
from third_party.GPEN.face_enhancement import FaceEnhancement

make_abs_path = lambda fn: os.path.abspath(os.path.join(os.path.dirname(os.path.realpath(__file__)), fn))


class GPENImageInfer(object):
    def __init__(self):
        super(GPENImageInfer, self).__init__()

        model = {
            "name": "GPEN-BFR-512",
            "in_size": 512,
            "out_size": 512,
            "channel_multiplier": 2,
            "narrow": 1,
        }
        faceenhancer = FaceEnhancement(
            base_dir=make_abs_path('./'),
            use_sr=True,
            in_size=model["in_size"],
            out_size=model["out_size"],
            model=model["name"],
            channel_multiplier=model["channel_multiplier"],
            narrow=model["narrow"],
        )
        self.faceenhancer = faceenhancer

    def image_infer(self, in_img: np.ndarray):
        """
        :param in_img: np.ndarray, (H,W,BGR), in [0,255]
        :return: out_img: np.ndarray, (H,W,BGR), in [0,255]
        """
        h, w, _ = in_img.shape
        out_img, orig_faces, enhanced_faces = self.faceenhancer.process(in_img)
        out_img = cv2.resize(out_img, (w, h))
        return out_img

    def ndarray_infer(self, in_ndarray: np.ndarray,
                      save_folder: str = 'demo_images/out/',
                      save_name: str = 'reen.png',
                      ):
        """
        :param in_ndarray: np.ndarray, (N,H,W,BGR), in [0,255]
        :param save_folder: not used
        :param save_name: not used
        :return: out_ndarray: np.ndarray, (N,H,W,BGR), in [0,255]
        """
        B, H, W, C = in_ndarray.shape

        out_ndarray = np.zeros_like(in_ndarray, dtype=np.uint8)  # (N,H,W,BGR)
        for b_idx in range(B):
            single_img = in_ndarray[b_idx]
            out_img = self.image_infer(single_img)  # (H,W,BGR), in [0,255]
            out_ndarray[b_idx] = out_img
        return out_ndarray

    def batch_infer(self, in_batch: torch.Tensor,
                    save_folder: str = 'demo_images/out/',
                    save_name: str = 'reen.png',
                    save_batch_idx: int = 0,
                    ):
        """
        :param in_batch: (N,RGB,H,W), in [-1,1]
        :return: out_batch: (N,RGB,H,W), in [-1,1]
        """
        B, C, H, W = in_batch.shape

        in_batch = ((in_batch + 1.) * 127.5).permute(0, 2, 3, 1)
        in_batch = in_batch.cpu().numpy().astype(np.uint8)  # (N,H,W,RGB), in [0,255]
        in_batch = in_batch[:, :, :, ::-1]  # (N,H,W,BGR)

        out_batch = np.zeros_like(in_batch, dtype=np.uint8)  # (N,H,W,BGR)
        for b_idx in range(B):
            single_img = in_batch[b_idx]
            out_img = self.image_infer(single_img)  # (H,W,BGR), in [0,255]
            out_batch[b_idx] = out_img[:, :, ::-1]
            if save_batch_idx is not None and b_idx == save_batch_idx:
                cv2.imwrite(os.path.join(save_folder, save_name), out_img)
        out_batch = torch.FloatTensor(out_batch).cuda()
        out_batch = out_batch / 127.5 - 1.  # (N,H,W,RGB)
        out_batch = out_batch.permute(0, 3, 1, 2)  # (N,RGB,H,W)
        out_batch = out_batch.clamp(-1, 1)

        return out_batch


if __name__ == '__main__':
    gpen = GPENImageInfer()

    in_folder = 'examples/imgs/'
    img_list = os.listdir(in_folder)

    for img_name in img_list:
        if 'gpen' in img_name:
            continue

        in_path = os.path.join(in_folder, img_name)
        # choose the output suffix from the input extension
        if in_path.endswith('.png'):
            out_path = in_path.replace('.png', '_gpen.png')
        else:
            out_path = in_path.replace('.jpg', '_gpen.jpg')

        im = cv2.imread(in_path, cv2.IMREAD_COLOR)  # BGR
        img = gpen.image_infer(im)
        cv2.imwrite(out_path, img)
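Note: GPENImageInfer wraps FaceEnhancement with fixed GPEN-BFR-512 settings; batch_infer converts a [-1,1] RGB tensor batch to uint8 BGR frames, enhances each frame, and converts back. A minimal usage sketch, not part of the committed files; it assumes the GPEN weights referenced by FaceEnhancement are present under third_party/GPEN, and the image path is hypothetical.

# Enhance a single BGR image in place of the __main__ loop above.
import cv2

gpen = GPENImageInfer()
frame = cv2.imread('examples/imgs/face.jpg', cv2.IMREAD_COLOR)   # hypothetical path, BGR uint8
restored = gpen.image_infer(frame)                               # same H and W, BGR uint8
cv2.imwrite('examples/imgs/face_gpen.jpg', restored)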
third_party/GPEN/infer_video.py
ADDED
|
@@ -0,0 +1,94 @@
import os
import cv2
import numpy as np
import glob
import tqdm
import shutil
import argparse
from face_enhancement import FaceEnhancement


def process_video(target_path, out_path, faceenhancer):
    fps = 25.0
    os.makedirs(out_path, exist_ok=True)
    original_vid_path = target_path
    vid_name = "out.mp4"
    if not os.path.isdir(target_path):
        vid_name = target_path.split("/")[-1]
        vidcap = cv2.VideoCapture(target_path)
        fps = vidcap.get(cv2.CAP_PROP_FPS)
        try:
            for match in glob.glob(os.path.join("./tmp/", "*.png")):
                os.remove(match)
            for match in glob.glob(os.path.join(out_path, "*.png")):
                os.remove(match)
        except Exception as e:
            print(e)
        os.makedirs("./tmp/", exist_ok=True)
        os.system(
            f"ffmpeg -i {target_path} -qscale:v 1 -qmin 1 -qmax 1 -vsync 0 ./tmp/frame_%d.png"
        )
        target_path = "./tmp/"
    else:
        print("folder not implemented.")
        exit()

    globbed_images = sorted(glob.glob(os.path.join(target_path, "*.png")))
    for image in tqdm.tqdm(globbed_images):
        name = image.split("/")[-1]
        filename = os.path.join(out_path, name)
        im = cv2.imread(image, cv2.IMREAD_COLOR)  # BGR
        h, w, _ = im.shape
        # im = cv2.resize(im, (0,0), fx=2, fy=2) #optional
        img, orig_faces, enhanced_faces = faceenhancer.process(im)
        img = cv2.resize(img, (w, h))
        cv2.imwrite(filename, img)

    # merge frames to video
    video_save_path = os.path.join(out_path, vid_name)

    os.system(
        f"ffmpeg -y -r {fps} -i {out_path}/frame_%d.png -i {original_vid_path}"
        f" -map 0:v:0 -map 1:a? -c:a copy -c:v libx264 -r {fps} -pix_fmt yuv420p {video_save_path}"
    )

    # delete tmp files
    shutil.rmtree("./tmp/")
    for match in glob.glob(os.path.join(out_path, "*.png")):
        os.remove(match)


if __name__ == "__main__":
    model = {
        "name": "GPEN-BFR-512",
        "in_size": 512,
        "out_size": 512,
        "channel_multiplier": 2,
        "narrow": 1,
    }
    parser = argparse.ArgumentParser()
    parser.add_argument("--indir", type=str, required=True, help="input file")
    parser.add_argument(
        "--outdir",
        type=str,
        required=True,
        help="output folder; at most one level of its parent directories may be missing",
    )
    args = parser.parse_args()

    os.makedirs(args.outdir, exist_ok=True)

    faceenhancer = FaceEnhancement(
        use_sr=True,
        in_size=model["in_size"],
        out_size=model["out_size"],
        model=model["name"],
        channel_multiplier=model["channel_multiplier"],
        narrow=model["narrow"],
    )

    process_video(
        args.indir,
        args.outdir,
        faceenhancer,
    )
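Note: the script extracts the input video to PNG frames with ffmpeg, enhances each frame with FaceEnhancement, then re-muxes at the original fps while copying the source audio. A typical invocation (assuming ffmpeg is on PATH and the GPEN weights have been downloaded) would be `python infer_video.py --indir /path/to/input.mp4 --outdir /path/to/out_dir`; the result is written to `<outdir>/<input name>` and the intermediate frames are removed afterwards.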
third_party/GPEN/misc/cog.yaml
ADDED
|
@@ -0,0 +1,17 @@
build:
  gpu: true
  python_version: "3.8"
  system_packages:
    - "libgl1-mesa-glx"
    - "libglib2.0-0"
    - "ninja-build"
  python_packages:
    - "torch==1.7.1"
    - "torchvision==0.8.2"
    - "numpy==1.20.1"
    - "ipython==7.21.0"
    - "Pillow==8.3.1"
    - "scikit-image==0.18.3"
    - "opencv-python==4.5.3.56"

predict: "predict.py:Predictor"