Spaces:
Sleeping
Sleeping
mrneuralnet
committed on
Commit
•
982865f
1
Parent(s):
3e2e2be
Initial commit
Browse filesThis view is limited to 50 files because it contains too many changes.
See raw diff
- .gitignore +2 -0
- app.py +72 -0
- data/FDDB/img_list.txt +2845 -0
- data/__init__.py +3 -0
- data/__pycache__/__init__.cpython-39.pyc +0 -0
- data/__pycache__/config.cpython-39.pyc +0 -0
- data/__pycache__/data_augment.cpython-39.pyc +0 -0
- data/__pycache__/wider_face.cpython-39.pyc +0 -0
- data/config.py +42 -0
- data/data_augment.py +237 -0
- data/wider_face.py +101 -0
- dataset/__init__.py +17 -0
- dataset/abstract_dataset.py +41 -0
- dataset/celeb_df.py +126 -0
- dataset/dfdc.py +124 -0
- dataset/faceforensics.py +107 -0
- dataset/wild_deepfake.py +100 -0
- extract_video.py +233 -0
- inference.py +142 -0
- layers/__init__.py +2 -0
- layers/__pycache__/__init__.cpython-39.pyc +0 -0
- layers/functions/__pycache__/prior_box.cpython-39.pyc +0 -0
- layers/functions/prior_box.py +34 -0
- layers/modules/__init__.py +3 -0
- layers/modules/__pycache__/__init__.cpython-39.pyc +0 -0
- layers/modules/__pycache__/multibox_loss.cpython-39.pyc +0 -0
- layers/modules/multibox_loss.py +125 -0
- loss/__init__.py +12 -0
- model/__init__.py +12 -0
- model/__pycache__/__init__.cpython-39.pyc +0 -0
- model/__pycache__/common.cpython-39.pyc +0 -0
- model/common.py +200 -0
- model/network/Recce.py +133 -0
- model/network/__init__.py +1 -0
- model/network/__pycache__/Recce.cpython-39.pyc +0 -0
- model/network/__pycache__/__init__.cpython-39.pyc +0 -0
- models/__init__.py +0 -0
- models/__pycache__/__init__.cpython-39.pyc +0 -0
- models/__pycache__/net.cpython-39.pyc +0 -0
- models/__pycache__/retinaface.cpython-39.pyc +0 -0
- models/net.py +137 -0
- models/retinaface.py +127 -0
- optimizer/__init__.py +30 -0
- requirements.txt +24 -0
- scheduler/__init__.py +36 -0
- trainer/__init__.py +5 -0
- trainer/abstract_trainer.py +100 -0
- trainer/exp_mgpu_trainer.py +370 -0
- trainer/exp_tester.py +144 -0
- trainer/utils.py +183 -0
.gitignore
ADDED
@@ -0,0 +1,2 @@
|
|
|
|
|
|
|
1 |
+
temps/*
|
2 |
+
!temps/.gitkeep
|
app.py
ADDED
@@ -0,0 +1,72 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import base64
|
2 |
+
import json
|
3 |
+
import os, shutil
|
4 |
+
import re
|
5 |
+
import time
|
6 |
+
import uuid
|
7 |
+
|
8 |
+
import cv2
|
9 |
+
|
10 |
+
import numpy as np
|
11 |
+
import streamlit as st
|
12 |
+
from PIL import Image
|
13 |
+
# from extract_video import extract_method_single_video
|
14 |
+
|
15 |
+
import shlex
|
16 |
+
import subprocess
|
17 |
+
|
18 |
+
def main():
    """Streamlit entry point for the deepfake-detection demo.

    Accepts a single image (jpg/jpeg/png) or mp4 video upload, saves it under
    ``temps/``, shells out to ``extract_video.py`` to crop face frames and to
    ``inference.py`` to score them, then renders a FAKE/REAL verdict with the
    mean fake probability. Temp artifacts are removed on success.
    """
    st.markdown("###")
    uploaded_file = st.file_uploader(
        'Upload a picture',
        type=['mp4', 'jpg', 'jpeg', 'png'],
        accept_multiple_files=False,
    )
    if uploaded_file:
        # Unique name so concurrent sessions don't clobber each other's temp files.
        random_id = uuid.uuid1()
        filename = "{}.{}".format(random_id, uploaded_file.type.split("/")[-1])
        file_type = uploaded_file.type.split("/")[0]  # "image" or "video"

        if uploaded_file.type == 'video/mp4':
            with open(f"temps/{filename}", mode='wb') as f:
                f.write(uploaded_file.read())
            st.video(uploaded_file)
        else:
            img = Image.open(uploaded_file).convert('RGB')
            with open(f"temps/{filename}", mode='wb') as f:
                f.write(uploaded_file.getbuffer())
            st.image(img)

        # Filename stem; extract_video.py is assumed to drop cropped frames in
        # temps/images/<stem>/ — NOTE(review): original path placeholder was
        # garbled, confirm against extract_video.py's output layout.
        folder_name = ".".join(filename.split(".")[:-1])

        with st.spinner(f'Processing {file_type}...'):
            subprocess.run(shlex.split(
                "python.exe extract_video.py --device cuda --max_frames 50 --bs 2 "
                "--frame_interval 5 --confidence_threshold 0.997 "
                f"--data_path temps/{filename}"
            ))
        st.text(f'1. Processing {file_type} ✅')
        with st.spinner(f'Analyzing {file_type}...'):
            pred = subprocess.run(shlex.split(
                "python inference.py --weight weights/model_params_ffpp_c23.pickle "
                f"--device cuda --image_folder temps/images/{folder_name}"
            ), capture_output=True)
        st.text(f'2. Analyzing {file_type} ✅')

        print(pred)
        try:
            # inference.py prints "... Mean prediction: <float>" on stdout.
            fake_probability = float(
                pred.stdout.decode('utf-8').split('Mean prediction: ')[-1]
            )
            if fake_probability > 0.6:
                st.error(' FAKE! ', icon="🚨")
            else:
                st.success(" REAL FOOTAGE! ", icon="✅")
            st.text("fake probability {:.2f}".format(fake_probability))

            # Clean up the uploaded file and the extracted-frame folder.
            os.remove(f"temps/{filename}")
            shutil.rmtree(f"temps/images/{folder_name}")
        except Exception:
            # Parsing or cleanup failed (e.g. inference crashed, no faces
            # found): show the raw subprocess output so the user can debug.
            st.text(pred.stdout.decode('utf-8'))

            st.text("")
            st.text(pred)
+
|
67 |
+
if __name__ == "__main__":
    # Page config must be the first Streamlit command executed in the script.
    st.set_page_config(page_title="Nodeflux Deepfake Detection", page_icon=":pencil2:")
    st.title("Deepfake Detection")
    main()
data/FDDB/img_list.txt
ADDED
@@ -0,0 +1,2845 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
2002/08/11/big/img_591
|
2 |
+
2002/08/26/big/img_265
|
3 |
+
2002/07/19/big/img_423
|
4 |
+
2002/08/24/big/img_490
|
5 |
+
2002/08/31/big/img_17676
|
6 |
+
2002/07/31/big/img_228
|
7 |
+
2002/07/24/big/img_402
|
8 |
+
2002/08/04/big/img_769
|
9 |
+
2002/07/19/big/img_581
|
10 |
+
2002/08/13/big/img_723
|
11 |
+
2002/08/12/big/img_821
|
12 |
+
2003/01/17/big/img_610
|
13 |
+
2002/08/13/big/img_1116
|
14 |
+
2002/08/28/big/img_19238
|
15 |
+
2002/08/21/big/img_660
|
16 |
+
2002/08/14/big/img_607
|
17 |
+
2002/08/05/big/img_3708
|
18 |
+
2002/08/19/big/img_511
|
19 |
+
2002/08/07/big/img_1316
|
20 |
+
2002/07/25/big/img_1047
|
21 |
+
2002/07/23/big/img_474
|
22 |
+
2002/07/27/big/img_970
|
23 |
+
2002/09/02/big/img_15752
|
24 |
+
2002/09/01/big/img_16378
|
25 |
+
2002/09/01/big/img_16189
|
26 |
+
2002/08/26/big/img_276
|
27 |
+
2002/07/24/big/img_518
|
28 |
+
2002/08/14/big/img_1027
|
29 |
+
2002/08/24/big/img_733
|
30 |
+
2002/08/15/big/img_249
|
31 |
+
2003/01/15/big/img_1371
|
32 |
+
2002/08/07/big/img_1348
|
33 |
+
2003/01/01/big/img_331
|
34 |
+
2002/08/23/big/img_536
|
35 |
+
2002/07/30/big/img_224
|
36 |
+
2002/08/10/big/img_763
|
37 |
+
2002/08/21/big/img_293
|
38 |
+
2002/08/15/big/img_1211
|
39 |
+
2002/08/15/big/img_1194
|
40 |
+
2003/01/15/big/img_390
|
41 |
+
2002/08/06/big/img_2893
|
42 |
+
2002/08/17/big/img_691
|
43 |
+
2002/08/07/big/img_1695
|
44 |
+
2002/08/16/big/img_829
|
45 |
+
2002/07/25/big/img_201
|
46 |
+
2002/08/23/big/img_36
|
47 |
+
2003/01/15/big/img_763
|
48 |
+
2003/01/15/big/img_637
|
49 |
+
2002/08/22/big/img_592
|
50 |
+
2002/07/25/big/img_817
|
51 |
+
2003/01/15/big/img_1219
|
52 |
+
2002/08/05/big/img_3508
|
53 |
+
2002/08/15/big/img_1108
|
54 |
+
2002/07/19/big/img_488
|
55 |
+
2003/01/16/big/img_704
|
56 |
+
2003/01/13/big/img_1087
|
57 |
+
2002/08/10/big/img_670
|
58 |
+
2002/07/24/big/img_104
|
59 |
+
2002/08/27/big/img_19823
|
60 |
+
2002/09/01/big/img_16229
|
61 |
+
2003/01/13/big/img_846
|
62 |
+
2002/08/04/big/img_412
|
63 |
+
2002/07/22/big/img_554
|
64 |
+
2002/08/12/big/img_331
|
65 |
+
2002/08/02/big/img_533
|
66 |
+
2002/08/12/big/img_259
|
67 |
+
2002/08/18/big/img_328
|
68 |
+
2003/01/14/big/img_630
|
69 |
+
2002/08/05/big/img_3541
|
70 |
+
2002/08/06/big/img_2390
|
71 |
+
2002/08/20/big/img_150
|
72 |
+
2002/08/02/big/img_1231
|
73 |
+
2002/08/16/big/img_710
|
74 |
+
2002/08/19/big/img_591
|
75 |
+
2002/07/22/big/img_725
|
76 |
+
2002/07/24/big/img_820
|
77 |
+
2003/01/13/big/img_568
|
78 |
+
2002/08/22/big/img_853
|
79 |
+
2002/08/09/big/img_648
|
80 |
+
2002/08/23/big/img_528
|
81 |
+
2003/01/14/big/img_888
|
82 |
+
2002/08/30/big/img_18201
|
83 |
+
2002/08/13/big/img_965
|
84 |
+
2003/01/14/big/img_660
|
85 |
+
2002/07/19/big/img_517
|
86 |
+
2003/01/14/big/img_406
|
87 |
+
2002/08/30/big/img_18433
|
88 |
+
2002/08/07/big/img_1630
|
89 |
+
2002/08/06/big/img_2717
|
90 |
+
2002/08/21/big/img_470
|
91 |
+
2002/07/23/big/img_633
|
92 |
+
2002/08/20/big/img_915
|
93 |
+
2002/08/16/big/img_893
|
94 |
+
2002/07/29/big/img_644
|
95 |
+
2002/08/15/big/img_529
|
96 |
+
2002/08/16/big/img_668
|
97 |
+
2002/08/07/big/img_1871
|
98 |
+
2002/07/25/big/img_192
|
99 |
+
2002/07/31/big/img_961
|
100 |
+
2002/08/19/big/img_738
|
101 |
+
2002/07/31/big/img_382
|
102 |
+
2002/08/19/big/img_298
|
103 |
+
2003/01/17/big/img_608
|
104 |
+
2002/08/21/big/img_514
|
105 |
+
2002/07/23/big/img_183
|
106 |
+
2003/01/17/big/img_536
|
107 |
+
2002/07/24/big/img_478
|
108 |
+
2002/08/06/big/img_2997
|
109 |
+
2002/09/02/big/img_15380
|
110 |
+
2002/08/07/big/img_1153
|
111 |
+
2002/07/31/big/img_967
|
112 |
+
2002/07/31/big/img_711
|
113 |
+
2002/08/26/big/img_664
|
114 |
+
2003/01/01/big/img_326
|
115 |
+
2002/08/24/big/img_775
|
116 |
+
2002/08/08/big/img_961
|
117 |
+
2002/08/16/big/img_77
|
118 |
+
2002/08/12/big/img_296
|
119 |
+
2002/07/22/big/img_905
|
120 |
+
2003/01/13/big/img_284
|
121 |
+
2002/08/13/big/img_887
|
122 |
+
2002/08/24/big/img_849
|
123 |
+
2002/07/30/big/img_345
|
124 |
+
2002/08/18/big/img_419
|
125 |
+
2002/08/01/big/img_1347
|
126 |
+
2002/08/05/big/img_3670
|
127 |
+
2002/07/21/big/img_479
|
128 |
+
2002/08/08/big/img_913
|
129 |
+
2002/09/02/big/img_15828
|
130 |
+
2002/08/30/big/img_18194
|
131 |
+
2002/08/08/big/img_471
|
132 |
+
2002/08/22/big/img_734
|
133 |
+
2002/08/09/big/img_586
|
134 |
+
2002/08/09/big/img_454
|
135 |
+
2002/07/29/big/img_47
|
136 |
+
2002/07/19/big/img_381
|
137 |
+
2002/07/29/big/img_733
|
138 |
+
2002/08/20/big/img_327
|
139 |
+
2002/07/21/big/img_96
|
140 |
+
2002/08/06/big/img_2680
|
141 |
+
2002/07/25/big/img_919
|
142 |
+
2002/07/21/big/img_158
|
143 |
+
2002/07/22/big/img_801
|
144 |
+
2002/07/22/big/img_567
|
145 |
+
2002/07/24/big/img_804
|
146 |
+
2002/07/24/big/img_690
|
147 |
+
2003/01/15/big/img_576
|
148 |
+
2002/08/14/big/img_335
|
149 |
+
2003/01/13/big/img_390
|
150 |
+
2002/08/11/big/img_258
|
151 |
+
2002/07/23/big/img_917
|
152 |
+
2002/08/15/big/img_525
|
153 |
+
2003/01/15/big/img_505
|
154 |
+
2002/07/30/big/img_886
|
155 |
+
2003/01/16/big/img_640
|
156 |
+
2003/01/14/big/img_642
|
157 |
+
2003/01/17/big/img_844
|
158 |
+
2002/08/04/big/img_571
|
159 |
+
2002/08/29/big/img_18702
|
160 |
+
2003/01/15/big/img_240
|
161 |
+
2002/07/29/big/img_553
|
162 |
+
2002/08/10/big/img_354
|
163 |
+
2002/08/18/big/img_17
|
164 |
+
2003/01/15/big/img_782
|
165 |
+
2002/07/27/big/img_382
|
166 |
+
2002/08/14/big/img_970
|
167 |
+
2003/01/16/big/img_70
|
168 |
+
2003/01/16/big/img_625
|
169 |
+
2002/08/18/big/img_341
|
170 |
+
2002/08/26/big/img_188
|
171 |
+
2002/08/09/big/img_405
|
172 |
+
2002/08/02/big/img_37
|
173 |
+
2002/08/13/big/img_748
|
174 |
+
2002/07/22/big/img_399
|
175 |
+
2002/07/25/big/img_844
|
176 |
+
2002/08/12/big/img_340
|
177 |
+
2003/01/13/big/img_815
|
178 |
+
2002/08/26/big/img_5
|
179 |
+
2002/08/10/big/img_158
|
180 |
+
2002/08/18/big/img_95
|
181 |
+
2002/07/29/big/img_1297
|
182 |
+
2003/01/13/big/img_508
|
183 |
+
2002/09/01/big/img_16680
|
184 |
+
2003/01/16/big/img_338
|
185 |
+
2002/08/13/big/img_517
|
186 |
+
2002/07/22/big/img_626
|
187 |
+
2002/08/06/big/img_3024
|
188 |
+
2002/07/26/big/img_499
|
189 |
+
2003/01/13/big/img_387
|
190 |
+
2002/08/31/big/img_18025
|
191 |
+
2002/08/13/big/img_520
|
192 |
+
2003/01/16/big/img_576
|
193 |
+
2002/07/26/big/img_121
|
194 |
+
2002/08/25/big/img_703
|
195 |
+
2002/08/26/big/img_615
|
196 |
+
2002/08/17/big/img_434
|
197 |
+
2002/08/02/big/img_677
|
198 |
+
2002/08/18/big/img_276
|
199 |
+
2002/08/05/big/img_3672
|
200 |
+
2002/07/26/big/img_700
|
201 |
+
2002/07/31/big/img_277
|
202 |
+
2003/01/14/big/img_220
|
203 |
+
2002/08/23/big/img_232
|
204 |
+
2002/08/31/big/img_17422
|
205 |
+
2002/07/22/big/img_508
|
206 |
+
2002/08/13/big/img_681
|
207 |
+
2003/01/15/big/img_638
|
208 |
+
2002/08/30/big/img_18408
|
209 |
+
2003/01/14/big/img_533
|
210 |
+
2003/01/17/big/img_12
|
211 |
+
2002/08/28/big/img_19388
|
212 |
+
2002/08/08/big/img_133
|
213 |
+
2002/07/26/big/img_885
|
214 |
+
2002/08/19/big/img_387
|
215 |
+
2002/08/27/big/img_19976
|
216 |
+
2002/08/26/big/img_118
|
217 |
+
2002/08/28/big/img_19146
|
218 |
+
2002/08/05/big/img_3259
|
219 |
+
2002/08/15/big/img_536
|
220 |
+
2002/07/22/big/img_279
|
221 |
+
2002/07/22/big/img_9
|
222 |
+
2002/08/13/big/img_301
|
223 |
+
2002/08/15/big/img_974
|
224 |
+
2002/08/06/big/img_2355
|
225 |
+
2002/08/01/big/img_1526
|
226 |
+
2002/08/03/big/img_417
|
227 |
+
2002/08/04/big/img_407
|
228 |
+
2002/08/15/big/img_1029
|
229 |
+
2002/07/29/big/img_700
|
230 |
+
2002/08/01/big/img_1463
|
231 |
+
2002/08/31/big/img_17365
|
232 |
+
2002/07/28/big/img_223
|
233 |
+
2002/07/19/big/img_827
|
234 |
+
2002/07/27/big/img_531
|
235 |
+
2002/07/19/big/img_845
|
236 |
+
2002/08/20/big/img_382
|
237 |
+
2002/07/31/big/img_268
|
238 |
+
2002/08/27/big/img_19705
|
239 |
+
2002/08/02/big/img_830
|
240 |
+
2002/08/23/big/img_250
|
241 |
+
2002/07/20/big/img_777
|
242 |
+
2002/08/21/big/img_879
|
243 |
+
2002/08/26/big/img_20146
|
244 |
+
2002/08/23/big/img_789
|
245 |
+
2002/08/06/big/img_2683
|
246 |
+
2002/08/25/big/img_576
|
247 |
+
2002/08/09/big/img_498
|
248 |
+
2002/08/08/big/img_384
|
249 |
+
2002/08/26/big/img_592
|
250 |
+
2002/07/29/big/img_1470
|
251 |
+
2002/08/21/big/img_452
|
252 |
+
2002/08/30/big/img_18395
|
253 |
+
2002/08/15/big/img_215
|
254 |
+
2002/07/21/big/img_643
|
255 |
+
2002/07/22/big/img_209
|
256 |
+
2003/01/17/big/img_346
|
257 |
+
2002/08/25/big/img_658
|
258 |
+
2002/08/21/big/img_221
|
259 |
+
2002/08/14/big/img_60
|
260 |
+
2003/01/17/big/img_885
|
261 |
+
2003/01/16/big/img_482
|
262 |
+
2002/08/19/big/img_593
|
263 |
+
2002/08/08/big/img_233
|
264 |
+
2002/07/30/big/img_458
|
265 |
+
2002/07/23/big/img_384
|
266 |
+
2003/01/15/big/img_670
|
267 |
+
2003/01/15/big/img_267
|
268 |
+
2002/08/26/big/img_540
|
269 |
+
2002/07/29/big/img_552
|
270 |
+
2002/07/30/big/img_997
|
271 |
+
2003/01/17/big/img_377
|
272 |
+
2002/08/21/big/img_265
|
273 |
+
2002/08/09/big/img_561
|
274 |
+
2002/07/31/big/img_945
|
275 |
+
2002/09/02/big/img_15252
|
276 |
+
2002/08/11/big/img_276
|
277 |
+
2002/07/22/big/img_491
|
278 |
+
2002/07/26/big/img_517
|
279 |
+
2002/08/14/big/img_726
|
280 |
+
2002/08/08/big/img_46
|
281 |
+
2002/08/28/big/img_19458
|
282 |
+
2002/08/06/big/img_2935
|
283 |
+
2002/07/29/big/img_1392
|
284 |
+
2002/08/13/big/img_776
|
285 |
+
2002/08/24/big/img_616
|
286 |
+
2002/08/14/big/img_1065
|
287 |
+
2002/07/29/big/img_889
|
288 |
+
2002/08/18/big/img_188
|
289 |
+
2002/08/07/big/img_1453
|
290 |
+
2002/08/02/big/img_760
|
291 |
+
2002/07/28/big/img_416
|
292 |
+
2002/08/07/big/img_1393
|
293 |
+
2002/08/26/big/img_292
|
294 |
+
2002/08/26/big/img_301
|
295 |
+
2003/01/13/big/img_195
|
296 |
+
2002/07/26/big/img_532
|
297 |
+
2002/08/20/big/img_550
|
298 |
+
2002/08/05/big/img_3658
|
299 |
+
2002/08/26/big/img_738
|
300 |
+
2002/09/02/big/img_15750
|
301 |
+
2003/01/17/big/img_451
|
302 |
+
2002/07/23/big/img_339
|
303 |
+
2002/08/16/big/img_637
|
304 |
+
2002/08/14/big/img_748
|
305 |
+
2002/08/06/big/img_2739
|
306 |
+
2002/07/25/big/img_482
|
307 |
+
2002/08/19/big/img_191
|
308 |
+
2002/08/26/big/img_537
|
309 |
+
2003/01/15/big/img_716
|
310 |
+
2003/01/15/big/img_767
|
311 |
+
2002/08/02/big/img_452
|
312 |
+
2002/08/08/big/img_1011
|
313 |
+
2002/08/10/big/img_144
|
314 |
+
2003/01/14/big/img_122
|
315 |
+
2002/07/24/big/img_586
|
316 |
+
2002/07/24/big/img_762
|
317 |
+
2002/08/20/big/img_369
|
318 |
+
2002/07/30/big/img_146
|
319 |
+
2002/08/23/big/img_396
|
320 |
+
2003/01/15/big/img_200
|
321 |
+
2002/08/15/big/img_1183
|
322 |
+
2003/01/14/big/img_698
|
323 |
+
2002/08/09/big/img_792
|
324 |
+
2002/08/06/big/img_2347
|
325 |
+
2002/07/31/big/img_911
|
326 |
+
2002/08/26/big/img_722
|
327 |
+
2002/08/23/big/img_621
|
328 |
+
2002/08/05/big/img_3790
|
329 |
+
2003/01/13/big/img_633
|
330 |
+
2002/08/09/big/img_224
|
331 |
+
2002/07/24/big/img_454
|
332 |
+
2002/07/21/big/img_202
|
333 |
+
2002/08/02/big/img_630
|
334 |
+
2002/08/30/big/img_18315
|
335 |
+
2002/07/19/big/img_491
|
336 |
+
2002/09/01/big/img_16456
|
337 |
+
2002/08/09/big/img_242
|
338 |
+
2002/07/25/big/img_595
|
339 |
+
2002/07/22/big/img_522
|
340 |
+
2002/08/01/big/img_1593
|
341 |
+
2002/07/29/big/img_336
|
342 |
+
2002/08/15/big/img_448
|
343 |
+
2002/08/28/big/img_19281
|
344 |
+
2002/07/29/big/img_342
|
345 |
+
2002/08/12/big/img_78
|
346 |
+
2003/01/14/big/img_525
|
347 |
+
2002/07/28/big/img_147
|
348 |
+
2002/08/11/big/img_353
|
349 |
+
2002/08/22/big/img_513
|
350 |
+
2002/08/04/big/img_721
|
351 |
+
2002/08/17/big/img_247
|
352 |
+
2003/01/14/big/img_891
|
353 |
+
2002/08/20/big/img_853
|
354 |
+
2002/07/19/big/img_414
|
355 |
+
2002/08/01/big/img_1530
|
356 |
+
2003/01/14/big/img_924
|
357 |
+
2002/08/22/big/img_468
|
358 |
+
2002/08/18/big/img_354
|
359 |
+
2002/08/30/big/img_18193
|
360 |
+
2002/08/23/big/img_492
|
361 |
+
2002/08/15/big/img_871
|
362 |
+
2002/08/12/big/img_494
|
363 |
+
2002/08/06/big/img_2470
|
364 |
+
2002/07/23/big/img_923
|
365 |
+
2002/08/26/big/img_155
|
366 |
+
2002/08/08/big/img_669
|
367 |
+
2002/07/23/big/img_404
|
368 |
+
2002/08/28/big/img_19421
|
369 |
+
2002/08/29/big/img_18993
|
370 |
+
2002/08/25/big/img_416
|
371 |
+
2003/01/17/big/img_434
|
372 |
+
2002/07/29/big/img_1370
|
373 |
+
2002/07/28/big/img_483
|
374 |
+
2002/08/11/big/img_50
|
375 |
+
2002/08/10/big/img_404
|
376 |
+
2002/09/02/big/img_15057
|
377 |
+
2003/01/14/big/img_911
|
378 |
+
2002/09/01/big/img_16697
|
379 |
+
2003/01/16/big/img_665
|
380 |
+
2002/09/01/big/img_16708
|
381 |
+
2002/08/22/big/img_612
|
382 |
+
2002/08/28/big/img_19471
|
383 |
+
2002/08/02/big/img_198
|
384 |
+
2003/01/16/big/img_527
|
385 |
+
2002/08/22/big/img_209
|
386 |
+
2002/08/30/big/img_18205
|
387 |
+
2003/01/14/big/img_114
|
388 |
+
2003/01/14/big/img_1028
|
389 |
+
2003/01/16/big/img_894
|
390 |
+
2003/01/14/big/img_837
|
391 |
+
2002/07/30/big/img_9
|
392 |
+
2002/08/06/big/img_2821
|
393 |
+
2002/08/04/big/img_85
|
394 |
+
2003/01/13/big/img_884
|
395 |
+
2002/07/22/big/img_570
|
396 |
+
2002/08/07/big/img_1773
|
397 |
+
2002/07/26/big/img_208
|
398 |
+
2003/01/17/big/img_946
|
399 |
+
2002/07/19/big/img_930
|
400 |
+
2003/01/01/big/img_698
|
401 |
+
2003/01/17/big/img_612
|
402 |
+
2002/07/19/big/img_372
|
403 |
+
2002/07/30/big/img_721
|
404 |
+
2003/01/14/big/img_649
|
405 |
+
2002/08/19/big/img_4
|
406 |
+
2002/07/25/big/img_1024
|
407 |
+
2003/01/15/big/img_601
|
408 |
+
2002/08/30/big/img_18470
|
409 |
+
2002/07/22/big/img_29
|
410 |
+
2002/08/07/big/img_1686
|
411 |
+
2002/07/20/big/img_294
|
412 |
+
2002/08/14/big/img_800
|
413 |
+
2002/08/19/big/img_353
|
414 |
+
2002/08/19/big/img_350
|
415 |
+
2002/08/05/big/img_3392
|
416 |
+
2002/08/09/big/img_622
|
417 |
+
2003/01/15/big/img_236
|
418 |
+
2002/08/11/big/img_643
|
419 |
+
2002/08/05/big/img_3458
|
420 |
+
2002/08/12/big/img_413
|
421 |
+
2002/08/22/big/img_415
|
422 |
+
2002/08/13/big/img_635
|
423 |
+
2002/08/07/big/img_1198
|
424 |
+
2002/08/04/big/img_873
|
425 |
+
2002/08/12/big/img_407
|
426 |
+
2003/01/15/big/img_346
|
427 |
+
2002/08/02/big/img_275
|
428 |
+
2002/08/17/big/img_997
|
429 |
+
2002/08/21/big/img_958
|
430 |
+
2002/08/20/big/img_579
|
431 |
+
2002/07/29/big/img_142
|
432 |
+
2003/01/14/big/img_1115
|
433 |
+
2002/08/16/big/img_365
|
434 |
+
2002/07/29/big/img_1414
|
435 |
+
2002/08/17/big/img_489
|
436 |
+
2002/08/13/big/img_1010
|
437 |
+
2002/07/31/big/img_276
|
438 |
+
2002/07/25/big/img_1000
|
439 |
+
2002/08/23/big/img_524
|
440 |
+
2002/08/28/big/img_19147
|
441 |
+
2003/01/13/big/img_433
|
442 |
+
2002/08/20/big/img_205
|
443 |
+
2003/01/01/big/img_458
|
444 |
+
2002/07/29/big/img_1449
|
445 |
+
2003/01/16/big/img_696
|
446 |
+
2002/08/28/big/img_19296
|
447 |
+
2002/08/29/big/img_18688
|
448 |
+
2002/08/21/big/img_767
|
449 |
+
2002/08/20/big/img_532
|
450 |
+
2002/08/26/big/img_187
|
451 |
+
2002/07/26/big/img_183
|
452 |
+
2002/07/27/big/img_890
|
453 |
+
2003/01/13/big/img_576
|
454 |
+
2002/07/30/big/img_15
|
455 |
+
2002/07/31/big/img_889
|
456 |
+
2002/08/31/big/img_17759
|
457 |
+
2003/01/14/big/img_1114
|
458 |
+
2002/07/19/big/img_445
|
459 |
+
2002/08/03/big/img_593
|
460 |
+
2002/07/24/big/img_750
|
461 |
+
2002/07/30/big/img_133
|
462 |
+
2002/08/25/big/img_671
|
463 |
+
2002/07/20/big/img_351
|
464 |
+
2002/08/31/big/img_17276
|
465 |
+
2002/08/05/big/img_3231
|
466 |
+
2002/09/02/big/img_15882
|
467 |
+
2002/08/14/big/img_115
|
468 |
+
2002/08/02/big/img_1148
|
469 |
+
2002/07/25/big/img_936
|
470 |
+
2002/07/31/big/img_639
|
471 |
+
2002/08/04/big/img_427
|
472 |
+
2002/08/22/big/img_843
|
473 |
+
2003/01/17/big/img_17
|
474 |
+
2003/01/13/big/img_690
|
475 |
+
2002/08/13/big/img_472
|
476 |
+
2002/08/09/big/img_425
|
477 |
+
2002/08/05/big/img_3450
|
478 |
+
2003/01/17/big/img_439
|
479 |
+
2002/08/13/big/img_539
|
480 |
+
2002/07/28/big/img_35
|
481 |
+
2002/08/16/big/img_241
|
482 |
+
2002/08/06/big/img_2898
|
483 |
+
2003/01/16/big/img_429
|
484 |
+
2002/08/05/big/img_3817
|
485 |
+
2002/08/27/big/img_19919
|
486 |
+
2002/07/19/big/img_422
|
487 |
+
2002/08/15/big/img_560
|
488 |
+
2002/07/23/big/img_750
|
489 |
+
2002/07/30/big/img_353
|
490 |
+
2002/08/05/big/img_43
|
491 |
+
2002/08/23/big/img_305
|
492 |
+
2002/08/01/big/img_2137
|
493 |
+
2002/08/30/big/img_18097
|
494 |
+
2002/08/01/big/img_1389
|
495 |
+
2002/08/02/big/img_308
|
496 |
+
2003/01/14/big/img_652
|
497 |
+
2002/08/01/big/img_1798
|
498 |
+
2003/01/14/big/img_732
|
499 |
+
2003/01/16/big/img_294
|
500 |
+
2002/08/26/big/img_213
|
501 |
+
2002/07/24/big/img_842
|
502 |
+
2003/01/13/big/img_630
|
503 |
+
2003/01/13/big/img_634
|
504 |
+
2002/08/06/big/img_2285
|
505 |
+
2002/08/01/big/img_2162
|
506 |
+
2002/08/30/big/img_18134
|
507 |
+
2002/08/02/big/img_1045
|
508 |
+
2002/08/01/big/img_2143
|
509 |
+
2002/07/25/big/img_135
|
510 |
+
2002/07/20/big/img_645
|
511 |
+
2002/08/05/big/img_3666
|
512 |
+
2002/08/14/big/img_523
|
513 |
+
2002/08/04/big/img_425
|
514 |
+
2003/01/14/big/img_137
|
515 |
+
2003/01/01/big/img_176
|
516 |
+
2002/08/15/big/img_505
|
517 |
+
2002/08/24/big/img_386
|
518 |
+
2002/08/05/big/img_3187
|
519 |
+
2002/08/15/big/img_419
|
520 |
+
2003/01/13/big/img_520
|
521 |
+
2002/08/04/big/img_444
|
522 |
+
2002/08/26/big/img_483
|
523 |
+
2002/08/05/big/img_3449
|
524 |
+
2002/08/30/big/img_18409
|
525 |
+
2002/08/28/big/img_19455
|
526 |
+
2002/08/27/big/img_20090
|
527 |
+
2002/07/23/big/img_625
|
528 |
+
2002/08/24/big/img_205
|
529 |
+
2002/08/08/big/img_938
|
530 |
+
2003/01/13/big/img_527
|
531 |
+
2002/08/07/big/img_1712
|
532 |
+
2002/07/24/big/img_801
|
533 |
+
2002/08/09/big/img_579
|
534 |
+
2003/01/14/big/img_41
|
535 |
+
2003/01/15/big/img_1130
|
536 |
+
2002/07/21/big/img_672
|
537 |
+
2002/08/07/big/img_1590
|
538 |
+
2003/01/01/big/img_532
|
539 |
+
2002/08/02/big/img_529
|
540 |
+
2002/08/05/big/img_3591
|
541 |
+
2002/08/23/big/img_5
|
542 |
+
2003/01/14/big/img_882
|
543 |
+
2002/08/28/big/img_19234
|
544 |
+
2002/07/24/big/img_398
|
545 |
+
2003/01/14/big/img_592
|
546 |
+
2002/08/22/big/img_548
|
547 |
+
2002/08/12/big/img_761
|
548 |
+
2003/01/16/big/img_497
|
549 |
+
2002/08/18/big/img_133
|
550 |
+
2002/08/08/big/img_874
|
551 |
+
2002/07/19/big/img_247
|
552 |
+
2002/08/15/big/img_170
|
553 |
+
2002/08/27/big/img_19679
|
554 |
+
2002/08/20/big/img_246
|
555 |
+
2002/08/24/big/img_358
|
556 |
+
2002/07/29/big/img_599
|
557 |
+
2002/08/01/big/img_1555
|
558 |
+
2002/07/30/big/img_491
|
559 |
+
2002/07/30/big/img_371
|
560 |
+
2003/01/16/big/img_682
|
561 |
+
2002/07/25/big/img_619
|
562 |
+
2003/01/15/big/img_587
|
563 |
+
2002/08/02/big/img_1212
|
564 |
+
2002/08/01/big/img_2152
|
565 |
+
2002/07/25/big/img_668
|
566 |
+
2003/01/16/big/img_574
|
567 |
+
2002/08/28/big/img_19464
|
568 |
+
2002/08/11/big/img_536
|
569 |
+
2002/07/24/big/img_201
|
570 |
+
2002/08/05/big/img_3488
|
571 |
+
2002/07/25/big/img_887
|
572 |
+
2002/07/22/big/img_789
|
573 |
+
2002/07/30/big/img_432
|
574 |
+
2002/08/16/big/img_166
|
575 |
+
2002/09/01/big/img_16333
|
576 |
+
2002/07/26/big/img_1010
|
577 |
+
2002/07/21/big/img_793
|
578 |
+
2002/07/22/big/img_720
|
579 |
+
2002/07/31/big/img_337
|
580 |
+
2002/07/27/big/img_185
|
581 |
+
2002/08/23/big/img_440
|
582 |
+
2002/07/31/big/img_801
|
583 |
+
2002/07/25/big/img_478
|
584 |
+
2003/01/14/big/img_171
|
585 |
+
2002/08/07/big/img_1054
|
586 |
+
2002/09/02/big/img_15659
|
587 |
+
2002/07/29/big/img_1348
|
588 |
+
2002/08/09/big/img_337
|
589 |
+
2002/08/26/big/img_684
|
590 |
+
2002/07/31/big/img_537
|
591 |
+
2002/08/15/big/img_808
|
592 |
+
2003/01/13/big/img_740
|
593 |
+
2002/08/07/big/img_1667
|
594 |
+
2002/08/03/big/img_404
|
595 |
+
2002/08/06/big/img_2520
|
596 |
+
2002/07/19/big/img_230
|
597 |
+
2002/07/19/big/img_356
|
598 |
+
2003/01/16/big/img_627
|
599 |
+
2002/08/04/big/img_474
|
600 |
+
2002/07/29/big/img_833
|
601 |
+
2002/07/25/big/img_176
|
602 |
+
2002/08/01/big/img_1684
|
603 |
+
2002/08/21/big/img_643
|
604 |
+
2002/08/27/big/img_19673
|
605 |
+
2002/08/02/big/img_838
|
606 |
+
2002/08/06/big/img_2378
|
607 |
+
2003/01/15/big/img_48
|
608 |
+
2002/07/30/big/img_470
|
609 |
+
2002/08/15/big/img_963
|
610 |
+
2002/08/24/big/img_444
|
611 |
+
2002/08/16/big/img_662
|
612 |
+
2002/08/15/big/img_1209
|
613 |
+
2002/07/24/big/img_25
|
614 |
+
2002/08/06/big/img_2740
|
615 |
+
2002/07/29/big/img_996
|
616 |
+
2002/08/31/big/img_18074
|
617 |
+
2002/08/04/big/img_343
|
618 |
+
2003/01/17/big/img_509
|
619 |
+
2003/01/13/big/img_726
|
620 |
+
2002/08/07/big/img_1466
|
621 |
+
2002/07/26/big/img_307
|
622 |
+
2002/08/10/big/img_598
|
623 |
+
2002/08/13/big/img_890
|
624 |
+
2002/08/14/big/img_997
|
625 |
+
2002/07/19/big/img_392
|
626 |
+
2002/08/02/big/img_475
|
627 |
+
2002/08/29/big/img_19038
|
628 |
+
2002/07/29/big/img_538
|
629 |
+
2002/07/29/big/img_502
|
630 |
+
2002/08/02/big/img_364
|
631 |
+
2002/08/31/big/img_17353
|
632 |
+
2002/08/08/big/img_539
|
633 |
+
2002/08/01/big/img_1449
|
634 |
+
2002/07/22/big/img_363
|
635 |
+
2002/08/02/big/img_90
|
636 |
+
2002/09/01/big/img_16867
|
637 |
+
2002/08/05/big/img_3371
|
638 |
+
2002/07/30/big/img_342
|
639 |
+
2002/08/07/big/img_1363
|
640 |
+
2002/08/22/big/img_790
|
641 |
+
2003/01/15/big/img_404
|
642 |
+
2002/08/05/big/img_3447
|
643 |
+
2002/09/01/big/img_16167
|
644 |
+
2003/01/13/big/img_840
|
645 |
+
2002/08/22/big/img_1001
|
646 |
+
2002/08/09/big/img_431
|
647 |
+
2002/07/27/big/img_618
|
648 |
+
2002/07/31/big/img_741
|
649 |
+
2002/07/30/big/img_964
|
650 |
+
2002/07/25/big/img_86
|
651 |
+
2002/07/29/big/img_275
|
652 |
+
2002/08/21/big/img_921
|
653 |
+
2002/07/26/big/img_892
|
654 |
+
2002/08/21/big/img_663
|
655 |
+
2003/01/13/big/img_567
|
656 |
+
2003/01/14/big/img_719
|
657 |
+
2002/07/28/big/img_251
|
658 |
+
2003/01/15/big/img_1123
|
659 |
+
2002/07/29/big/img_260
|
660 |
+
2002/08/24/big/img_337
|
661 |
+
2002/08/01/big/img_1914
|
662 |
+
2002/08/13/big/img_373
|
663 |
+
2003/01/15/big/img_589
|
664 |
+
2002/08/13/big/img_906
|
665 |
+
2002/07/26/big/img_270
|
666 |
+
2002/08/26/big/img_313
|
667 |
+
2002/08/25/big/img_694
|
668 |
+
2003/01/01/big/img_327
|
669 |
+
2002/07/23/big/img_261
|
670 |
+
2002/08/26/big/img_642
|
671 |
+
2002/07/29/big/img_918
|
672 |
+
2002/07/23/big/img_455
|
673 |
+
2002/07/24/big/img_612
|
674 |
+
2002/07/23/big/img_534
|
675 |
+
2002/07/19/big/img_534
|
676 |
+
2002/07/19/big/img_726
|
677 |
+
2002/08/01/big/img_2146
|
678 |
+
2002/08/02/big/img_543
|
679 |
+
2003/01/16/big/img_777
|
680 |
+
2002/07/30/big/img_484
|
681 |
+
2002/08/13/big/img_1161
|
682 |
+
2002/07/21/big/img_390
|
683 |
+
2002/08/06/big/img_2288
|
684 |
+
2002/08/21/big/img_677
|
685 |
+
2002/08/13/big/img_747
|
686 |
+
2002/08/15/big/img_1248
|
687 |
+
2002/07/31/big/img_416
|
688 |
+
2002/09/02/big/img_15259
|
689 |
+
2002/08/16/big/img_781
|
690 |
+
2002/08/24/big/img_754
|
691 |
+
2002/07/24/big/img_803
|
692 |
+
2002/08/20/big/img_609
|
693 |
+
2002/08/28/big/img_19571
|
694 |
+
2002/09/01/big/img_16140
|
695 |
+
2002/08/26/big/img_769
|
696 |
+
2002/07/20/big/img_588
|
697 |
+
2002/08/02/big/img_898
|
698 |
+
2002/07/21/big/img_466
|
699 |
+
2002/08/14/big/img_1046
|
700 |
+
2002/07/25/big/img_212
|
701 |
+
2002/08/26/big/img_353
|
702 |
+
2002/08/19/big/img_810
|
703 |
+
2002/08/31/big/img_17824
|
704 |
+
2002/08/12/big/img_631
|
705 |
+
2002/07/19/big/img_828
|
706 |
+
2002/07/24/big/img_130
|
707 |
+
2002/08/25/big/img_580
|
708 |
+
2002/07/31/big/img_699
|
709 |
+
2002/07/23/big/img_808
|
710 |
+
2002/07/31/big/img_377
|
711 |
+
2003/01/16/big/img_570
|
712 |
+
2002/09/01/big/img_16254
|
713 |
+
2002/07/21/big/img_471
|
714 |
+
2002/08/01/big/img_1548
|
715 |
+
2002/08/18/big/img_252
|
716 |
+
2002/08/19/big/img_576
|
717 |
+
2002/08/20/big/img_464
|
718 |
+
2002/07/27/big/img_735
|
719 |
+
2002/08/21/big/img_589
|
720 |
+
2003/01/15/big/img_1192
|
721 |
+
2002/08/09/big/img_302
|
722 |
+
2002/07/31/big/img_594
|
723 |
+
2002/08/23/big/img_19
|
724 |
+
2002/08/29/big/img_18819
|
725 |
+
2002/08/19/big/img_293
|
726 |
+
2002/07/30/big/img_331
|
727 |
+
2002/08/23/big/img_607
|
728 |
+
2002/07/30/big/img_363
|
729 |
+
2002/08/16/big/img_766
|
730 |
+
2003/01/13/big/img_481
|
731 |
+
2002/08/06/big/img_2515
|
732 |
+
2002/09/02/big/img_15913
|
733 |
+
2002/09/02/big/img_15827
|
734 |
+
2002/09/02/big/img_15053
|
735 |
+
2002/08/07/big/img_1576
|
736 |
+
2002/07/23/big/img_268
|
737 |
+
2002/08/21/big/img_152
|
738 |
+
2003/01/15/big/img_578
|
739 |
+
2002/07/21/big/img_589
|
740 |
+
2002/07/20/big/img_548
|
741 |
+
2002/08/27/big/img_19693
|
742 |
+
2002/08/31/big/img_17252
|
743 |
+
2002/07/31/big/img_138
|
744 |
+
2002/07/23/big/img_372
|
745 |
+
2002/08/16/big/img_695
|
746 |
+
2002/07/27/big/img_287
|
747 |
+
2002/08/15/big/img_315
|
748 |
+
2002/08/10/big/img_361
|
749 |
+
2002/07/29/big/img_899
|
750 |
+
2002/08/13/big/img_771
|
751 |
+
2002/08/21/big/img_92
|
752 |
+
2003/01/15/big/img_425
|
753 |
+
2003/01/16/big/img_450
|
754 |
+
2002/09/01/big/img_16942
|
755 |
+
2002/08/02/big/img_51
|
756 |
+
2002/09/02/big/img_15379
|
757 |
+
2002/08/24/big/img_147
|
758 |
+
2002/08/30/big/img_18122
|
759 |
+
2002/07/26/big/img_950
|
760 |
+
2002/08/07/big/img_1400
|
761 |
+
2002/08/17/big/img_468
|
762 |
+
2002/08/15/big/img_470
|
763 |
+
2002/07/30/big/img_318
|
764 |
+
2002/07/22/big/img_644
|
765 |
+
2002/08/27/big/img_19732
|
766 |
+
2002/07/23/big/img_601
|
767 |
+
2002/08/26/big/img_398
|
768 |
+
2002/08/21/big/img_428
|
769 |
+
2002/08/06/big/img_2119
|
770 |
+
2002/08/29/big/img_19103
|
771 |
+
2003/01/14/big/img_933
|
772 |
+
2002/08/11/big/img_674
|
773 |
+
2002/08/28/big/img_19420
|
774 |
+
2002/08/03/big/img_418
|
775 |
+
2002/08/17/big/img_312
|
776 |
+
2002/07/25/big/img_1044
|
777 |
+
2003/01/17/big/img_671
|
778 |
+
2002/08/30/big/img_18297
|
779 |
+
2002/07/25/big/img_755
|
780 |
+
2002/07/23/big/img_471
|
781 |
+
2002/08/21/big/img_39
|
782 |
+
2002/07/26/big/img_699
|
783 |
+
2003/01/14/big/img_33
|
784 |
+
2002/07/31/big/img_411
|
785 |
+
2002/08/16/big/img_645
|
786 |
+
2003/01/17/big/img_116
|
787 |
+
2002/09/02/big/img_15903
|
788 |
+
2002/08/20/big/img_120
|
789 |
+
2002/08/22/big/img_176
|
790 |
+
2002/07/29/big/img_1316
|
791 |
+
2002/08/27/big/img_19914
|
792 |
+
2002/07/22/big/img_719
|
793 |
+
2002/08/28/big/img_19239
|
794 |
+
2003/01/13/big/img_385
|
795 |
+
2002/08/08/big/img_525
|
796 |
+
2002/07/19/big/img_782
|
797 |
+
2002/08/13/big/img_843
|
798 |
+
2002/07/30/big/img_107
|
799 |
+
2002/08/11/big/img_752
|
800 |
+
2002/07/29/big/img_383
|
801 |
+
2002/08/26/big/img_249
|
802 |
+
2002/08/29/big/img_18860
|
803 |
+
2002/07/30/big/img_70
|
804 |
+
2002/07/26/big/img_194
|
805 |
+
2002/08/15/big/img_530
|
806 |
+
2002/08/08/big/img_816
|
807 |
+
2002/07/31/big/img_286
|
808 |
+
2003/01/13/big/img_294
|
809 |
+
2002/07/31/big/img_251
|
810 |
+
2002/07/24/big/img_13
|
811 |
+
2002/08/31/big/img_17938
|
812 |
+
2002/07/22/big/img_642
|
813 |
+
2003/01/14/big/img_728
|
814 |
+
2002/08/18/big/img_47
|
815 |
+
2002/08/22/big/img_306
|
816 |
+
2002/08/20/big/img_348
|
817 |
+
2002/08/15/big/img_764
|
818 |
+
2002/08/08/big/img_163
|
819 |
+
2002/07/23/big/img_531
|
820 |
+
2002/07/23/big/img_467
|
821 |
+
2003/01/16/big/img_743
|
822 |
+
2003/01/13/big/img_535
|
823 |
+
2002/08/02/big/img_523
|
824 |
+
2002/08/22/big/img_120
|
825 |
+
2002/08/11/big/img_496
|
826 |
+
2002/08/29/big/img_19075
|
827 |
+
2002/08/08/big/img_465
|
828 |
+
2002/08/09/big/img_790
|
829 |
+
2002/08/19/big/img_588
|
830 |
+
2002/08/23/big/img_407
|
831 |
+
2003/01/17/big/img_435
|
832 |
+
2002/08/24/big/img_398
|
833 |
+
2002/08/27/big/img_19899
|
834 |
+
2003/01/15/big/img_335
|
835 |
+
2002/08/13/big/img_493
|
836 |
+
2002/09/02/big/img_15460
|
837 |
+
2002/07/31/big/img_470
|
838 |
+
2002/08/05/big/img_3550
|
839 |
+
2002/07/28/big/img_123
|
840 |
+
2002/08/01/big/img_1498
|
841 |
+
2002/08/04/big/img_504
|
842 |
+
2003/01/17/big/img_427
|
843 |
+
2002/08/27/big/img_19708
|
844 |
+
2002/07/27/big/img_861
|
845 |
+
2002/07/25/big/img_685
|
846 |
+
2002/07/31/big/img_207
|
847 |
+
2003/01/14/big/img_745
|
848 |
+
2002/08/31/big/img_17756
|
849 |
+
2002/08/24/big/img_288
|
850 |
+
2002/08/18/big/img_181
|
851 |
+
2002/08/10/big/img_520
|
852 |
+
2002/08/25/big/img_705
|
853 |
+
2002/08/23/big/img_226
|
854 |
+
2002/08/04/big/img_727
|
855 |
+
2002/07/24/big/img_625
|
856 |
+
2002/08/28/big/img_19157
|
857 |
+
2002/08/23/big/img_586
|
858 |
+
2002/07/31/big/img_232
|
859 |
+
2003/01/13/big/img_240
|
860 |
+
2003/01/14/big/img_321
|
861 |
+
2003/01/15/big/img_533
|
862 |
+
2002/07/23/big/img_480
|
863 |
+
2002/07/24/big/img_371
|
864 |
+
2002/08/21/big/img_702
|
865 |
+
2002/08/31/big/img_17075
|
866 |
+
2002/09/02/big/img_15278
|
867 |
+
2002/07/29/big/img_246
|
868 |
+
2003/01/15/big/img_829
|
869 |
+
2003/01/15/big/img_1213
|
870 |
+
2003/01/16/big/img_441
|
871 |
+
2002/08/14/big/img_921
|
872 |
+
2002/07/23/big/img_425
|
873 |
+
2002/08/15/big/img_296
|
874 |
+
2002/07/19/big/img_135
|
875 |
+
2002/07/26/big/img_402
|
876 |
+
2003/01/17/big/img_88
|
877 |
+
2002/08/20/big/img_872
|
878 |
+
2002/08/13/big/img_1110
|
879 |
+
2003/01/16/big/img_1040
|
880 |
+
2002/07/23/big/img_9
|
881 |
+
2002/08/13/big/img_700
|
882 |
+
2002/08/16/big/img_371
|
883 |
+
2002/08/27/big/img_19966
|
884 |
+
2003/01/17/big/img_391
|
885 |
+
2002/08/18/big/img_426
|
886 |
+
2002/08/01/big/img_1618
|
887 |
+
2002/07/21/big/img_754
|
888 |
+
2003/01/14/big/img_1101
|
889 |
+
2003/01/16/big/img_1022
|
890 |
+
2002/07/22/big/img_275
|
891 |
+
2002/08/24/big/img_86
|
892 |
+
2002/08/17/big/img_582
|
893 |
+
2003/01/15/big/img_765
|
894 |
+
2003/01/17/big/img_449
|
895 |
+
2002/07/28/big/img_265
|
896 |
+
2003/01/13/big/img_552
|
897 |
+
2002/07/28/big/img_115
|
898 |
+
2003/01/16/big/img_56
|
899 |
+
2002/08/02/big/img_1232
|
900 |
+
2003/01/17/big/img_925
|
901 |
+
2002/07/22/big/img_445
|
902 |
+
2002/07/25/big/img_957
|
903 |
+
2002/07/20/big/img_589
|
904 |
+
2002/08/31/big/img_17107
|
905 |
+
2002/07/29/big/img_483
|
906 |
+
2002/08/14/big/img_1063
|
907 |
+
2002/08/07/big/img_1545
|
908 |
+
2002/08/14/big/img_680
|
909 |
+
2002/09/01/big/img_16694
|
910 |
+
2002/08/14/big/img_257
|
911 |
+
2002/08/11/big/img_726
|
912 |
+
2002/07/26/big/img_681
|
913 |
+
2002/07/25/big/img_481
|
914 |
+
2003/01/14/big/img_737
|
915 |
+
2002/08/28/big/img_19480
|
916 |
+
2003/01/16/big/img_362
|
917 |
+
2002/08/27/big/img_19865
|
918 |
+
2003/01/01/big/img_547
|
919 |
+
2002/09/02/big/img_15074
|
920 |
+
2002/08/01/big/img_1453
|
921 |
+
2002/08/22/big/img_594
|
922 |
+
2002/08/28/big/img_19263
|
923 |
+
2002/08/13/big/img_478
|
924 |
+
2002/07/29/big/img_1358
|
925 |
+
2003/01/14/big/img_1022
|
926 |
+
2002/08/16/big/img_450
|
927 |
+
2002/08/02/big/img_159
|
928 |
+
2002/07/26/big/img_781
|
929 |
+
2003/01/13/big/img_601
|
930 |
+
2002/08/20/big/img_407
|
931 |
+
2002/08/15/big/img_468
|
932 |
+
2002/08/31/big/img_17902
|
933 |
+
2002/08/16/big/img_81
|
934 |
+
2002/07/25/big/img_987
|
935 |
+
2002/07/25/big/img_500
|
936 |
+
2002/08/02/big/img_31
|
937 |
+
2002/08/18/big/img_538
|
938 |
+
2002/08/08/big/img_54
|
939 |
+
2002/07/23/big/img_686
|
940 |
+
2002/07/24/big/img_836
|
941 |
+
2003/01/17/big/img_734
|
942 |
+
2002/08/16/big/img_1055
|
943 |
+
2003/01/16/big/img_521
|
944 |
+
2002/07/25/big/img_612
|
945 |
+
2002/08/22/big/img_778
|
946 |
+
2002/08/03/big/img_251
|
947 |
+
2002/08/12/big/img_436
|
948 |
+
2002/08/23/big/img_705
|
949 |
+
2002/07/28/big/img_243
|
950 |
+
2002/07/25/big/img_1029
|
951 |
+
2002/08/20/big/img_287
|
952 |
+
2002/08/29/big/img_18739
|
953 |
+
2002/08/05/big/img_3272
|
954 |
+
2002/07/27/big/img_214
|
955 |
+
2003/01/14/big/img_5
|
956 |
+
2002/08/01/big/img_1380
|
957 |
+
2002/08/29/big/img_19097
|
958 |
+
2002/07/30/big/img_486
|
959 |
+
2002/08/29/big/img_18707
|
960 |
+
2002/08/10/big/img_559
|
961 |
+
2002/08/15/big/img_365
|
962 |
+
2002/08/09/big/img_525
|
963 |
+
2002/08/10/big/img_689
|
964 |
+
2002/07/25/big/img_502
|
965 |
+
2002/08/03/big/img_667
|
966 |
+
2002/08/10/big/img_855
|
967 |
+
2002/08/10/big/img_706
|
968 |
+
2002/08/18/big/img_603
|
969 |
+
2003/01/16/big/img_1055
|
970 |
+
2002/08/31/big/img_17890
|
971 |
+
2002/08/15/big/img_761
|
972 |
+
2003/01/15/big/img_489
|
973 |
+
2002/08/26/big/img_351
|
974 |
+
2002/08/01/big/img_1772
|
975 |
+
2002/08/31/big/img_17729
|
976 |
+
2002/07/25/big/img_609
|
977 |
+
2003/01/13/big/img_539
|
978 |
+
2002/07/27/big/img_686
|
979 |
+
2002/07/31/big/img_311
|
980 |
+
2002/08/22/big/img_799
|
981 |
+
2003/01/16/big/img_936
|
982 |
+
2002/08/31/big/img_17813
|
983 |
+
2002/08/04/big/img_862
|
984 |
+
2002/08/09/big/img_332
|
985 |
+
2002/07/20/big/img_148
|
986 |
+
2002/08/12/big/img_426
|
987 |
+
2002/07/24/big/img_69
|
988 |
+
2002/07/27/big/img_685
|
989 |
+
2002/08/02/big/img_480
|
990 |
+
2002/08/26/big/img_154
|
991 |
+
2002/07/24/big/img_598
|
992 |
+
2002/08/01/big/img_1881
|
993 |
+
2002/08/20/big/img_667
|
994 |
+
2003/01/14/big/img_495
|
995 |
+
2002/07/21/big/img_744
|
996 |
+
2002/07/30/big/img_150
|
997 |
+
2002/07/23/big/img_924
|
998 |
+
2002/08/08/big/img_272
|
999 |
+
2002/07/23/big/img_310
|
1000 |
+
2002/07/25/big/img_1011
|
1001 |
+
2002/09/02/big/img_15725
|
1002 |
+
2002/07/19/big/img_814
|
1003 |
+
2002/08/20/big/img_936
|
1004 |
+
2002/07/25/big/img_85
|
1005 |
+
2002/08/24/big/img_662
|
1006 |
+
2002/08/09/big/img_495
|
1007 |
+
2003/01/15/big/img_196
|
1008 |
+
2002/08/16/big/img_707
|
1009 |
+
2002/08/28/big/img_19370
|
1010 |
+
2002/08/06/big/img_2366
|
1011 |
+
2002/08/06/big/img_3012
|
1012 |
+
2002/08/01/big/img_1452
|
1013 |
+
2002/07/31/big/img_742
|
1014 |
+
2002/07/27/big/img_914
|
1015 |
+
2003/01/13/big/img_290
|
1016 |
+
2002/07/31/big/img_288
|
1017 |
+
2002/08/02/big/img_171
|
1018 |
+
2002/08/22/big/img_191
|
1019 |
+
2002/07/27/big/img_1066
|
1020 |
+
2002/08/12/big/img_383
|
1021 |
+
2003/01/17/big/img_1018
|
1022 |
+
2002/08/01/big/img_1785
|
1023 |
+
2002/08/11/big/img_390
|
1024 |
+
2002/08/27/big/img_20037
|
1025 |
+
2002/08/12/big/img_38
|
1026 |
+
2003/01/15/big/img_103
|
1027 |
+
2002/08/26/big/img_31
|
1028 |
+
2002/08/18/big/img_660
|
1029 |
+
2002/07/22/big/img_694
|
1030 |
+
2002/08/15/big/img_24
|
1031 |
+
2002/07/27/big/img_1077
|
1032 |
+
2002/08/01/big/img_1943
|
1033 |
+
2002/07/22/big/img_292
|
1034 |
+
2002/09/01/big/img_16857
|
1035 |
+
2002/07/22/big/img_892
|
1036 |
+
2003/01/14/big/img_46
|
1037 |
+
2002/08/09/big/img_469
|
1038 |
+
2002/08/09/big/img_414
|
1039 |
+
2003/01/16/big/img_40
|
1040 |
+
2002/08/28/big/img_19231
|
1041 |
+
2002/07/27/big/img_978
|
1042 |
+
2002/07/23/big/img_475
|
1043 |
+
2002/07/25/big/img_92
|
1044 |
+
2002/08/09/big/img_799
|
1045 |
+
2002/07/25/big/img_491
|
1046 |
+
2002/08/03/big/img_654
|
1047 |
+
2003/01/15/big/img_687
|
1048 |
+
2002/08/11/big/img_478
|
1049 |
+
2002/08/07/big/img_1664
|
1050 |
+
2002/08/20/big/img_362
|
1051 |
+
2002/08/01/big/img_1298
|
1052 |
+
2003/01/13/big/img_500
|
1053 |
+
2002/08/06/big/img_2896
|
1054 |
+
2002/08/30/big/img_18529
|
1055 |
+
2002/08/16/big/img_1020
|
1056 |
+
2002/07/29/big/img_892
|
1057 |
+
2002/08/29/big/img_18726
|
1058 |
+
2002/07/21/big/img_453
|
1059 |
+
2002/08/17/big/img_437
|
1060 |
+
2002/07/19/big/img_665
|
1061 |
+
2002/07/22/big/img_440
|
1062 |
+
2002/07/19/big/img_582
|
1063 |
+
2002/07/21/big/img_233
|
1064 |
+
2003/01/01/big/img_82
|
1065 |
+
2002/07/25/big/img_341
|
1066 |
+
2002/07/29/big/img_864
|
1067 |
+
2002/08/02/big/img_276
|
1068 |
+
2002/08/29/big/img_18654
|
1069 |
+
2002/07/27/big/img_1024
|
1070 |
+
2002/08/19/big/img_373
|
1071 |
+
2003/01/15/big/img_241
|
1072 |
+
2002/07/25/big/img_84
|
1073 |
+
2002/08/13/big/img_834
|
1074 |
+
2002/08/10/big/img_511
|
1075 |
+
2002/08/01/big/img_1627
|
1076 |
+
2002/08/08/big/img_607
|
1077 |
+
2002/08/06/big/img_2083
|
1078 |
+
2002/08/01/big/img_1486
|
1079 |
+
2002/08/08/big/img_700
|
1080 |
+
2002/08/01/big/img_1954
|
1081 |
+
2002/08/21/big/img_54
|
1082 |
+
2002/07/30/big/img_847
|
1083 |
+
2002/08/28/big/img_19169
|
1084 |
+
2002/07/21/big/img_549
|
1085 |
+
2002/08/03/big/img_693
|
1086 |
+
2002/07/31/big/img_1002
|
1087 |
+
2003/01/14/big/img_1035
|
1088 |
+
2003/01/16/big/img_622
|
1089 |
+
2002/07/30/big/img_1201
|
1090 |
+
2002/08/10/big/img_444
|
1091 |
+
2002/07/31/big/img_374
|
1092 |
+
2002/08/21/big/img_301
|
1093 |
+
2002/08/13/big/img_1095
|
1094 |
+
2003/01/13/big/img_288
|
1095 |
+
2002/07/25/big/img_232
|
1096 |
+
2003/01/13/big/img_967
|
1097 |
+
2002/08/26/big/img_360
|
1098 |
+
2002/08/05/big/img_67
|
1099 |
+
2002/08/29/big/img_18969
|
1100 |
+
2002/07/28/big/img_16
|
1101 |
+
2002/08/16/big/img_515
|
1102 |
+
2002/07/20/big/img_708
|
1103 |
+
2002/08/18/big/img_178
|
1104 |
+
2003/01/15/big/img_509
|
1105 |
+
2002/07/25/big/img_430
|
1106 |
+
2002/08/21/big/img_738
|
1107 |
+
2002/08/16/big/img_886
|
1108 |
+
2002/09/02/big/img_15605
|
1109 |
+
2002/09/01/big/img_16242
|
1110 |
+
2002/08/24/big/img_711
|
1111 |
+
2002/07/25/big/img_90
|
1112 |
+
2002/08/09/big/img_491
|
1113 |
+
2002/07/30/big/img_534
|
1114 |
+
2003/01/13/big/img_474
|
1115 |
+
2002/08/25/big/img_510
|
1116 |
+
2002/08/15/big/img_555
|
1117 |
+
2002/08/02/big/img_775
|
1118 |
+
2002/07/23/big/img_975
|
1119 |
+
2002/08/19/big/img_229
|
1120 |
+
2003/01/17/big/img_860
|
1121 |
+
2003/01/02/big/img_10
|
1122 |
+
2002/07/23/big/img_542
|
1123 |
+
2002/08/06/big/img_2535
|
1124 |
+
2002/07/22/big/img_37
|
1125 |
+
2002/08/06/big/img_2342
|
1126 |
+
2002/08/25/big/img_515
|
1127 |
+
2002/08/25/big/img_336
|
1128 |
+
2002/08/18/big/img_837
|
1129 |
+
2002/08/21/big/img_616
|
1130 |
+
2003/01/17/big/img_24
|
1131 |
+
2002/07/26/big/img_936
|
1132 |
+
2002/08/14/big/img_896
|
1133 |
+
2002/07/29/big/img_465
|
1134 |
+
2002/07/31/big/img_543
|
1135 |
+
2002/08/01/big/img_1411
|
1136 |
+
2002/08/02/big/img_423
|
1137 |
+
2002/08/21/big/img_44
|
1138 |
+
2002/07/31/big/img_11
|
1139 |
+
2003/01/15/big/img_628
|
1140 |
+
2003/01/15/big/img_605
|
1141 |
+
2002/07/30/big/img_571
|
1142 |
+
2002/07/23/big/img_428
|
1143 |
+
2002/08/15/big/img_942
|
1144 |
+
2002/07/26/big/img_531
|
1145 |
+
2003/01/16/big/img_59
|
1146 |
+
2002/08/02/big/img_410
|
1147 |
+
2002/07/31/big/img_230
|
1148 |
+
2002/08/19/big/img_806
|
1149 |
+
2003/01/14/big/img_462
|
1150 |
+
2002/08/16/big/img_370
|
1151 |
+
2002/08/13/big/img_380
|
1152 |
+
2002/08/16/big/img_932
|
1153 |
+
2002/07/19/big/img_393
|
1154 |
+
2002/08/20/big/img_764
|
1155 |
+
2002/08/15/big/img_616
|
1156 |
+
2002/07/26/big/img_267
|
1157 |
+
2002/07/27/big/img_1069
|
1158 |
+
2002/08/14/big/img_1041
|
1159 |
+
2003/01/13/big/img_594
|
1160 |
+
2002/09/01/big/img_16845
|
1161 |
+
2002/08/09/big/img_229
|
1162 |
+
2003/01/16/big/img_639
|
1163 |
+
2002/08/19/big/img_398
|
1164 |
+
2002/08/18/big/img_978
|
1165 |
+
2002/08/24/big/img_296
|
1166 |
+
2002/07/29/big/img_415
|
1167 |
+
2002/07/30/big/img_923
|
1168 |
+
2002/08/18/big/img_575
|
1169 |
+
2002/08/22/big/img_182
|
1170 |
+
2002/07/25/big/img_806
|
1171 |
+
2002/07/22/big/img_49
|
1172 |
+
2002/07/29/big/img_989
|
1173 |
+
2003/01/17/big/img_789
|
1174 |
+
2003/01/15/big/img_503
|
1175 |
+
2002/09/01/big/img_16062
|
1176 |
+
2003/01/17/big/img_794
|
1177 |
+
2002/08/15/big/img_564
|
1178 |
+
2003/01/15/big/img_222
|
1179 |
+
2002/08/01/big/img_1656
|
1180 |
+
2003/01/13/big/img_432
|
1181 |
+
2002/07/19/big/img_426
|
1182 |
+
2002/08/17/big/img_244
|
1183 |
+
2002/08/13/big/img_805
|
1184 |
+
2002/09/02/big/img_15067
|
1185 |
+
2002/08/11/big/img_58
|
1186 |
+
2002/08/22/big/img_636
|
1187 |
+
2002/07/22/big/img_416
|
1188 |
+
2002/08/13/big/img_836
|
1189 |
+
2002/08/26/big/img_363
|
1190 |
+
2002/07/30/big/img_917
|
1191 |
+
2003/01/14/big/img_206
|
1192 |
+
2002/08/12/big/img_311
|
1193 |
+
2002/08/31/big/img_17623
|
1194 |
+
2002/07/29/big/img_661
|
1195 |
+
2003/01/13/big/img_417
|
1196 |
+
2002/08/02/big/img_463
|
1197 |
+
2002/08/02/big/img_669
|
1198 |
+
2002/08/26/big/img_670
|
1199 |
+
2002/08/02/big/img_375
|
1200 |
+
2002/07/19/big/img_209
|
1201 |
+
2002/08/08/big/img_115
|
1202 |
+
2002/08/21/big/img_399
|
1203 |
+
2002/08/20/big/img_911
|
1204 |
+
2002/08/07/big/img_1212
|
1205 |
+
2002/08/20/big/img_578
|
1206 |
+
2002/08/22/big/img_554
|
1207 |
+
2002/08/21/big/img_484
|
1208 |
+
2002/07/25/big/img_450
|
1209 |
+
2002/08/03/big/img_542
|
1210 |
+
2002/08/15/big/img_561
|
1211 |
+
2002/07/23/big/img_360
|
1212 |
+
2002/08/30/big/img_18137
|
1213 |
+
2002/07/25/big/img_250
|
1214 |
+
2002/08/03/big/img_647
|
1215 |
+
2002/08/20/big/img_375
|
1216 |
+
2002/08/14/big/img_387
|
1217 |
+
2002/09/01/big/img_16990
|
1218 |
+
2002/08/28/big/img_19341
|
1219 |
+
2003/01/15/big/img_239
|
1220 |
+
2002/08/20/big/img_528
|
1221 |
+
2002/08/12/big/img_130
|
1222 |
+
2002/09/02/big/img_15108
|
1223 |
+
2003/01/15/big/img_372
|
1224 |
+
2002/08/16/big/img_678
|
1225 |
+
2002/08/04/big/img_623
|
1226 |
+
2002/07/23/big/img_477
|
1227 |
+
2002/08/28/big/img_19590
|
1228 |
+
2003/01/17/big/img_978
|
1229 |
+
2002/09/01/big/img_16692
|
1230 |
+
2002/07/20/big/img_109
|
1231 |
+
2002/08/06/big/img_2660
|
1232 |
+
2003/01/14/big/img_464
|
1233 |
+
2002/08/09/big/img_618
|
1234 |
+
2002/07/22/big/img_722
|
1235 |
+
2002/08/25/big/img_419
|
1236 |
+
2002/08/03/big/img_314
|
1237 |
+
2002/08/25/big/img_40
|
1238 |
+
2002/07/27/big/img_430
|
1239 |
+
2002/08/10/big/img_569
|
1240 |
+
2002/08/23/big/img_398
|
1241 |
+
2002/07/23/big/img_893
|
1242 |
+
2002/08/16/big/img_261
|
1243 |
+
2002/08/06/big/img_2668
|
1244 |
+
2002/07/22/big/img_835
|
1245 |
+
2002/09/02/big/img_15093
|
1246 |
+
2003/01/16/big/img_65
|
1247 |
+
2002/08/21/big/img_448
|
1248 |
+
2003/01/14/big/img_351
|
1249 |
+
2003/01/17/big/img_133
|
1250 |
+
2002/07/28/big/img_493
|
1251 |
+
2003/01/15/big/img_640
|
1252 |
+
2002/09/01/big/img_16880
|
1253 |
+
2002/08/15/big/img_350
|
1254 |
+
2002/08/20/big/img_624
|
1255 |
+
2002/08/25/big/img_604
|
1256 |
+
2002/08/06/big/img_2200
|
1257 |
+
2002/08/23/big/img_290
|
1258 |
+
2002/08/13/big/img_1152
|
1259 |
+
2003/01/14/big/img_251
|
1260 |
+
2002/08/02/big/img_538
|
1261 |
+
2002/08/22/big/img_613
|
1262 |
+
2003/01/13/big/img_351
|
1263 |
+
2002/08/18/big/img_368
|
1264 |
+
2002/07/23/big/img_392
|
1265 |
+
2002/07/25/big/img_198
|
1266 |
+
2002/07/25/big/img_418
|
1267 |
+
2002/08/26/big/img_614
|
1268 |
+
2002/07/23/big/img_405
|
1269 |
+
2003/01/14/big/img_445
|
1270 |
+
2002/07/25/big/img_326
|
1271 |
+
2002/08/10/big/img_734
|
1272 |
+
2003/01/14/big/img_530
|
1273 |
+
2002/08/08/big/img_561
|
1274 |
+
2002/08/29/big/img_18990
|
1275 |
+
2002/08/10/big/img_576
|
1276 |
+
2002/07/29/big/img_1494
|
1277 |
+
2002/07/19/big/img_198
|
1278 |
+
2002/08/10/big/img_562
|
1279 |
+
2002/07/22/big/img_901
|
1280 |
+
2003/01/14/big/img_37
|
1281 |
+
2002/09/02/big/img_15629
|
1282 |
+
2003/01/14/big/img_58
|
1283 |
+
2002/08/01/big/img_1364
|
1284 |
+
2002/07/27/big/img_636
|
1285 |
+
2003/01/13/big/img_241
|
1286 |
+
2002/09/01/big/img_16988
|
1287 |
+
2003/01/13/big/img_560
|
1288 |
+
2002/08/09/big/img_533
|
1289 |
+
2002/07/31/big/img_249
|
1290 |
+
2003/01/17/big/img_1007
|
1291 |
+
2002/07/21/big/img_64
|
1292 |
+
2003/01/13/big/img_537
|
1293 |
+
2003/01/15/big/img_606
|
1294 |
+
2002/08/18/big/img_651
|
1295 |
+
2002/08/24/big/img_405
|
1296 |
+
2002/07/26/big/img_837
|
1297 |
+
2002/08/09/big/img_562
|
1298 |
+
2002/08/01/big/img_1983
|
1299 |
+
2002/08/03/big/img_514
|
1300 |
+
2002/07/29/big/img_314
|
1301 |
+
2002/08/12/big/img_493
|
1302 |
+
2003/01/14/big/img_121
|
1303 |
+
2003/01/14/big/img_479
|
1304 |
+
2002/08/04/big/img_410
|
1305 |
+
2002/07/22/big/img_607
|
1306 |
+
2003/01/17/big/img_417
|
1307 |
+
2002/07/20/big/img_547
|
1308 |
+
2002/08/13/big/img_396
|
1309 |
+
2002/08/31/big/img_17538
|
1310 |
+
2002/08/13/big/img_187
|
1311 |
+
2002/08/12/big/img_328
|
1312 |
+
2003/01/14/big/img_569
|
1313 |
+
2002/07/27/big/img_1081
|
1314 |
+
2002/08/14/big/img_504
|
1315 |
+
2002/08/23/big/img_785
|
1316 |
+
2002/07/26/big/img_339
|
1317 |
+
2002/08/07/big/img_1156
|
1318 |
+
2002/08/07/big/img_1456
|
1319 |
+
2002/08/23/big/img_378
|
1320 |
+
2002/08/27/big/img_19719
|
1321 |
+
2002/07/31/big/img_39
|
1322 |
+
2002/07/31/big/img_883
|
1323 |
+
2003/01/14/big/img_676
|
1324 |
+
2002/07/29/big/img_214
|
1325 |
+
2002/07/26/big/img_669
|
1326 |
+
2002/07/25/big/img_202
|
1327 |
+
2002/08/08/big/img_259
|
1328 |
+
2003/01/17/big/img_943
|
1329 |
+
2003/01/15/big/img_512
|
1330 |
+
2002/08/05/big/img_3295
|
1331 |
+
2002/08/27/big/img_19685
|
1332 |
+
2002/08/08/big/img_277
|
1333 |
+
2002/08/30/big/img_18154
|
1334 |
+
2002/07/22/big/img_663
|
1335 |
+
2002/08/29/big/img_18914
|
1336 |
+
2002/07/31/big/img_908
|
1337 |
+
2002/08/27/big/img_19926
|
1338 |
+
2003/01/13/big/img_791
|
1339 |
+
2003/01/15/big/img_827
|
1340 |
+
2002/08/18/big/img_878
|
1341 |
+
2002/08/14/big/img_670
|
1342 |
+
2002/07/20/big/img_182
|
1343 |
+
2002/08/15/big/img_291
|
1344 |
+
2002/08/06/big/img_2600
|
1345 |
+
2002/07/23/big/img_587
|
1346 |
+
2002/08/14/big/img_577
|
1347 |
+
2003/01/15/big/img_585
|
1348 |
+
2002/07/30/big/img_310
|
1349 |
+
2002/08/03/big/img_658
|
1350 |
+
2002/08/10/big/img_157
|
1351 |
+
2002/08/19/big/img_811
|
1352 |
+
2002/07/29/big/img_1318
|
1353 |
+
2002/08/04/big/img_104
|
1354 |
+
2002/07/30/big/img_332
|
1355 |
+
2002/07/24/big/img_789
|
1356 |
+
2002/07/29/big/img_516
|
1357 |
+
2002/07/23/big/img_843
|
1358 |
+
2002/08/01/big/img_1528
|
1359 |
+
2002/08/13/big/img_798
|
1360 |
+
2002/08/07/big/img_1729
|
1361 |
+
2002/08/28/big/img_19448
|
1362 |
+
2003/01/16/big/img_95
|
1363 |
+
2002/08/12/big/img_473
|
1364 |
+
2002/07/27/big/img_269
|
1365 |
+
2003/01/16/big/img_621
|
1366 |
+
2002/07/29/big/img_772
|
1367 |
+
2002/07/24/big/img_171
|
1368 |
+
2002/07/19/big/img_429
|
1369 |
+
2002/08/07/big/img_1933
|
1370 |
+
2002/08/27/big/img_19629
|
1371 |
+
2002/08/05/big/img_3688
|
1372 |
+
2002/08/07/big/img_1691
|
1373 |
+
2002/07/23/big/img_600
|
1374 |
+
2002/07/29/big/img_666
|
1375 |
+
2002/08/25/big/img_566
|
1376 |
+
2002/08/06/big/img_2659
|
1377 |
+
2002/08/29/big/img_18929
|
1378 |
+
2002/08/16/big/img_407
|
1379 |
+
2002/08/18/big/img_774
|
1380 |
+
2002/08/19/big/img_249
|
1381 |
+
2002/08/06/big/img_2427
|
1382 |
+
2002/08/29/big/img_18899
|
1383 |
+
2002/08/01/big/img_1818
|
1384 |
+
2002/07/31/big/img_108
|
1385 |
+
2002/07/29/big/img_500
|
1386 |
+
2002/08/11/big/img_115
|
1387 |
+
2002/07/19/big/img_521
|
1388 |
+
2002/08/02/big/img_1163
|
1389 |
+
2002/07/22/big/img_62
|
1390 |
+
2002/08/13/big/img_466
|
1391 |
+
2002/08/21/big/img_956
|
1392 |
+
2002/08/23/big/img_602
|
1393 |
+
2002/08/20/big/img_858
|
1394 |
+
2002/07/25/big/img_690
|
1395 |
+
2002/07/19/big/img_130
|
1396 |
+
2002/08/04/big/img_874
|
1397 |
+
2002/07/26/big/img_489
|
1398 |
+
2002/07/22/big/img_548
|
1399 |
+
2002/08/10/big/img_191
|
1400 |
+
2002/07/25/big/img_1051
|
1401 |
+
2002/08/18/big/img_473
|
1402 |
+
2002/08/12/big/img_755
|
1403 |
+
2002/08/18/big/img_413
|
1404 |
+
2002/08/08/big/img_1044
|
1405 |
+
2002/08/17/big/img_680
|
1406 |
+
2002/08/26/big/img_235
|
1407 |
+
2002/08/20/big/img_330
|
1408 |
+
2002/08/22/big/img_344
|
1409 |
+
2002/08/09/big/img_593
|
1410 |
+
2002/07/31/big/img_1006
|
1411 |
+
2002/08/14/big/img_337
|
1412 |
+
2002/08/16/big/img_728
|
1413 |
+
2002/07/24/big/img_834
|
1414 |
+
2002/08/04/big/img_552
|
1415 |
+
2002/09/02/big/img_15213
|
1416 |
+
2002/07/25/big/img_725
|
1417 |
+
2002/08/30/big/img_18290
|
1418 |
+
2003/01/01/big/img_475
|
1419 |
+
2002/07/27/big/img_1083
|
1420 |
+
2002/08/29/big/img_18955
|
1421 |
+
2002/08/31/big/img_17232
|
1422 |
+
2002/08/08/big/img_480
|
1423 |
+
2002/08/01/big/img_1311
|
1424 |
+
2002/07/30/big/img_745
|
1425 |
+
2002/08/03/big/img_649
|
1426 |
+
2002/08/12/big/img_193
|
1427 |
+
2002/07/29/big/img_228
|
1428 |
+
2002/07/25/big/img_836
|
1429 |
+
2002/08/20/big/img_400
|
1430 |
+
2002/07/30/big/img_507
|
1431 |
+
2002/09/02/big/img_15072
|
1432 |
+
2002/07/26/big/img_658
|
1433 |
+
2002/07/28/big/img_503
|
1434 |
+
2002/08/05/big/img_3814
|
1435 |
+
2002/08/24/big/img_745
|
1436 |
+
2003/01/13/big/img_817
|
1437 |
+
2002/08/08/big/img_579
|
1438 |
+
2002/07/22/big/img_251
|
1439 |
+
2003/01/13/big/img_689
|
1440 |
+
2002/07/25/big/img_407
|
1441 |
+
2002/08/13/big/img_1050
|
1442 |
+
2002/08/14/big/img_733
|
1443 |
+
2002/07/24/big/img_82
|
1444 |
+
2003/01/17/big/img_288
|
1445 |
+
2003/01/15/big/img_475
|
1446 |
+
2002/08/14/big/img_620
|
1447 |
+
2002/08/21/big/img_167
|
1448 |
+
2002/07/19/big/img_300
|
1449 |
+
2002/07/26/big/img_219
|
1450 |
+
2002/08/01/big/img_1468
|
1451 |
+
2002/07/23/big/img_260
|
1452 |
+
2002/08/09/big/img_555
|
1453 |
+
2002/07/19/big/img_160
|
1454 |
+
2002/08/02/big/img_1060
|
1455 |
+
2003/01/14/big/img_149
|
1456 |
+
2002/08/15/big/img_346
|
1457 |
+
2002/08/24/big/img_597
|
1458 |
+
2002/08/22/big/img_502
|
1459 |
+
2002/08/30/big/img_18228
|
1460 |
+
2002/07/21/big/img_766
|
1461 |
+
2003/01/15/big/img_841
|
1462 |
+
2002/07/24/big/img_516
|
1463 |
+
2002/08/02/big/img_265
|
1464 |
+
2002/08/15/big/img_1243
|
1465 |
+
2003/01/15/big/img_223
|
1466 |
+
2002/08/04/big/img_236
|
1467 |
+
2002/07/22/big/img_309
|
1468 |
+
2002/07/20/big/img_656
|
1469 |
+
2002/07/31/big/img_412
|
1470 |
+
2002/09/01/big/img_16462
|
1471 |
+
2003/01/16/big/img_431
|
1472 |
+
2002/07/22/big/img_793
|
1473 |
+
2002/08/15/big/img_877
|
1474 |
+
2002/07/26/big/img_282
|
1475 |
+
2002/07/25/big/img_529
|
1476 |
+
2002/08/24/big/img_613
|
1477 |
+
2003/01/17/big/img_700
|
1478 |
+
2002/08/06/big/img_2526
|
1479 |
+
2002/08/24/big/img_394
|
1480 |
+
2002/08/21/big/img_521
|
1481 |
+
2002/08/25/big/img_560
|
1482 |
+
2002/07/29/big/img_966
|
1483 |
+
2002/07/25/big/img_448
|
1484 |
+
2003/01/13/big/img_782
|
1485 |
+
2002/08/21/big/img_296
|
1486 |
+
2002/09/01/big/img_16755
|
1487 |
+
2002/08/05/big/img_3552
|
1488 |
+
2002/09/02/big/img_15823
|
1489 |
+
2003/01/14/big/img_193
|
1490 |
+
2002/07/21/big/img_159
|
1491 |
+
2002/08/02/big/img_564
|
1492 |
+
2002/08/16/big/img_300
|
1493 |
+
2002/07/19/big/img_269
|
1494 |
+
2002/08/13/big/img_676
|
1495 |
+
2002/07/28/big/img_57
|
1496 |
+
2002/08/05/big/img_3318
|
1497 |
+
2002/07/31/big/img_218
|
1498 |
+
2002/08/21/big/img_898
|
1499 |
+
2002/07/29/big/img_109
|
1500 |
+
2002/07/19/big/img_854
|
1501 |
+
2002/08/23/big/img_311
|
1502 |
+
2002/08/14/big/img_318
|
1503 |
+
2002/07/25/big/img_523
|
1504 |
+
2002/07/21/big/img_678
|
1505 |
+
2003/01/17/big/img_690
|
1506 |
+
2002/08/28/big/img_19503
|
1507 |
+
2002/08/18/big/img_251
|
1508 |
+
2002/08/22/big/img_672
|
1509 |
+
2002/08/20/big/img_663
|
1510 |
+
2002/08/02/big/img_148
|
1511 |
+
2002/09/02/big/img_15580
|
1512 |
+
2002/07/25/big/img_778
|
1513 |
+
2002/08/14/big/img_565
|
1514 |
+
2002/08/12/big/img_374
|
1515 |
+
2002/08/13/big/img_1018
|
1516 |
+
2002/08/20/big/img_474
|
1517 |
+
2002/08/25/big/img_33
|
1518 |
+
2002/08/02/big/img_1190
|
1519 |
+
2002/08/08/big/img_864
|
1520 |
+
2002/08/14/big/img_1071
|
1521 |
+
2002/08/30/big/img_18103
|
1522 |
+
2002/08/18/big/img_533
|
1523 |
+
2003/01/16/big/img_650
|
1524 |
+
2002/07/25/big/img_108
|
1525 |
+
2002/07/26/big/img_81
|
1526 |
+
2002/07/27/big/img_543
|
1527 |
+
2002/07/29/big/img_521
|
1528 |
+
2003/01/13/big/img_434
|
1529 |
+
2002/08/26/big/img_674
|
1530 |
+
2002/08/06/big/img_2932
|
1531 |
+
2002/08/07/big/img_1262
|
1532 |
+
2003/01/15/big/img_201
|
1533 |
+
2003/01/16/big/img_673
|
1534 |
+
2002/09/02/big/img_15988
|
1535 |
+
2002/07/29/big/img_1306
|
1536 |
+
2003/01/14/big/img_1072
|
1537 |
+
2002/08/30/big/img_18232
|
1538 |
+
2002/08/05/big/img_3711
|
1539 |
+
2002/07/23/big/img_775
|
1540 |
+
2002/08/01/big/img_16
|
1541 |
+
2003/01/16/big/img_630
|
1542 |
+
2002/08/22/big/img_695
|
1543 |
+
2002/08/14/big/img_51
|
1544 |
+
2002/08/14/big/img_782
|
1545 |
+
2002/08/24/big/img_742
|
1546 |
+
2003/01/14/big/img_512
|
1547 |
+
2003/01/15/big/img_1183
|
1548 |
+
2003/01/15/big/img_714
|
1549 |
+
2002/08/01/big/img_2078
|
1550 |
+
2002/07/31/big/img_682
|
1551 |
+
2002/09/02/big/img_15687
|
1552 |
+
2002/07/26/big/img_518
|
1553 |
+
2002/08/27/big/img_19676
|
1554 |
+
2002/09/02/big/img_15969
|
1555 |
+
2002/08/02/big/img_931
|
1556 |
+
2002/08/25/big/img_508
|
1557 |
+
2002/08/29/big/img_18616
|
1558 |
+
2002/07/22/big/img_839
|
1559 |
+
2002/07/28/big/img_313
|
1560 |
+
2003/01/14/big/img_155
|
1561 |
+
2002/08/02/big/img_1105
|
1562 |
+
2002/08/09/big/img_53
|
1563 |
+
2002/08/16/big/img_469
|
1564 |
+
2002/08/15/big/img_502
|
1565 |
+
2002/08/20/big/img_575
|
1566 |
+
2002/07/25/big/img_138
|
1567 |
+
2003/01/16/big/img_579
|
1568 |
+
2002/07/19/big/img_352
|
1569 |
+
2003/01/14/big/img_762
|
1570 |
+
2003/01/01/big/img_588
|
1571 |
+
2002/08/02/big/img_981
|
1572 |
+
2002/08/21/big/img_447
|
1573 |
+
2002/09/01/big/img_16151
|
1574 |
+
2003/01/14/big/img_769
|
1575 |
+
2002/08/23/big/img_461
|
1576 |
+
2002/08/17/big/img_240
|
1577 |
+
2002/09/02/big/img_15220
|
1578 |
+
2002/07/19/big/img_408
|
1579 |
+
2002/09/02/big/img_15496
|
1580 |
+
2002/07/29/big/img_758
|
1581 |
+
2002/08/28/big/img_19392
|
1582 |
+
2002/08/06/big/img_2723
|
1583 |
+
2002/08/31/big/img_17752
|
1584 |
+
2002/08/23/big/img_469
|
1585 |
+
2002/08/13/big/img_515
|
1586 |
+
2002/09/02/big/img_15551
|
1587 |
+
2002/08/03/big/img_462
|
1588 |
+
2002/07/24/big/img_613
|
1589 |
+
2002/07/22/big/img_61
|
1590 |
+
2002/08/08/big/img_171
|
1591 |
+
2002/08/21/big/img_177
|
1592 |
+
2003/01/14/big/img_105
|
1593 |
+
2002/08/02/big/img_1017
|
1594 |
+
2002/08/22/big/img_106
|
1595 |
+
2002/07/27/big/img_542
|
1596 |
+
2002/07/21/big/img_665
|
1597 |
+
2002/07/23/big/img_595
|
1598 |
+
2002/08/04/big/img_657
|
1599 |
+
2002/08/29/big/img_19002
|
1600 |
+
2003/01/15/big/img_550
|
1601 |
+
2002/08/14/big/img_662
|
1602 |
+
2002/07/20/big/img_425
|
1603 |
+
2002/08/30/big/img_18528
|
1604 |
+
2002/07/26/big/img_611
|
1605 |
+
2002/07/22/big/img_849
|
1606 |
+
2002/08/07/big/img_1655
|
1607 |
+
2002/08/21/big/img_638
|
1608 |
+
2003/01/17/big/img_732
|
1609 |
+
2003/01/01/big/img_496
|
1610 |
+
2002/08/18/big/img_713
|
1611 |
+
2002/08/08/big/img_109
|
1612 |
+
2002/07/27/big/img_1008
|
1613 |
+
2002/07/20/big/img_559
|
1614 |
+
2002/08/16/big/img_699
|
1615 |
+
2002/08/31/big/img_17702
|
1616 |
+
2002/07/31/big/img_1013
|
1617 |
+
2002/08/01/big/img_2027
|
1618 |
+
2002/08/02/big/img_1001
|
1619 |
+
2002/08/03/big/img_210
|
1620 |
+
2002/08/01/big/img_2087
|
1621 |
+
2003/01/14/big/img_199
|
1622 |
+
2002/07/29/big/img_48
|
1623 |
+
2002/07/19/big/img_727
|
1624 |
+
2002/08/09/big/img_249
|
1625 |
+
2002/08/04/big/img_632
|
1626 |
+
2002/08/22/big/img_620
|
1627 |
+
2003/01/01/big/img_457
|
1628 |
+
2002/08/05/big/img_3223
|
1629 |
+
2002/07/27/big/img_240
|
1630 |
+
2002/07/25/big/img_797
|
1631 |
+
2002/08/13/big/img_430
|
1632 |
+
2002/07/25/big/img_615
|
1633 |
+
2002/08/12/big/img_28
|
1634 |
+
2002/07/30/big/img_220
|
1635 |
+
2002/07/24/big/img_89
|
1636 |
+
2002/08/21/big/img_357
|
1637 |
+
2002/08/09/big/img_590
|
1638 |
+
2003/01/13/big/img_525
|
1639 |
+
2002/08/17/big/img_818
|
1640 |
+
2003/01/02/big/img_7
|
1641 |
+
2002/07/26/big/img_636
|
1642 |
+
2003/01/13/big/img_1122
|
1643 |
+
2002/07/23/big/img_810
|
1644 |
+
2002/08/20/big/img_888
|
1645 |
+
2002/07/27/big/img_3
|
1646 |
+
2002/08/15/big/img_451
|
1647 |
+
2002/09/02/big/img_15787
|
1648 |
+
2002/07/31/big/img_281
|
1649 |
+
2002/08/05/big/img_3274
|
1650 |
+
2002/08/07/big/img_1254
|
1651 |
+
2002/07/31/big/img_27
|
1652 |
+
2002/08/01/big/img_1366
|
1653 |
+
2002/07/30/big/img_182
|
1654 |
+
2002/08/27/big/img_19690
|
1655 |
+
2002/07/29/big/img_68
|
1656 |
+
2002/08/23/big/img_754
|
1657 |
+
2002/07/30/big/img_540
|
1658 |
+
2002/08/27/big/img_20063
|
1659 |
+
2002/08/14/big/img_471
|
1660 |
+
2002/08/02/big/img_615
|
1661 |
+
2002/07/30/big/img_186
|
1662 |
+
2002/08/25/big/img_150
|
1663 |
+
2002/07/27/big/img_626
|
1664 |
+
2002/07/20/big/img_225
|
1665 |
+
2003/01/15/big/img_1252
|
1666 |
+
2002/07/19/big/img_367
|
1667 |
+
2003/01/15/big/img_582
|
1668 |
+
2002/08/09/big/img_572
|
1669 |
+
2002/08/08/big/img_428
|
1670 |
+
2003/01/15/big/img_639
|
1671 |
+
2002/08/28/big/img_19245
|
1672 |
+
2002/07/24/big/img_321
|
1673 |
+
2002/08/02/big/img_662
|
1674 |
+
2002/08/08/big/img_1033
|
1675 |
+
2003/01/17/big/img_867
|
1676 |
+
2002/07/22/big/img_652
|
1677 |
+
2003/01/14/big/img_224
|
1678 |
+
2002/08/18/big/img_49
|
1679 |
+
2002/07/26/big/img_46
|
1680 |
+
2002/08/31/big/img_18021
|
1681 |
+
2002/07/25/big/img_151
|
1682 |
+
2002/08/23/big/img_540
|
1683 |
+
2002/08/25/big/img_693
|
1684 |
+
2002/07/23/big/img_340
|
1685 |
+
2002/07/28/big/img_117
|
1686 |
+
2002/09/02/big/img_15768
|
1687 |
+
2002/08/26/big/img_562
|
1688 |
+
2002/07/24/big/img_480
|
1689 |
+
2003/01/15/big/img_341
|
1690 |
+
2002/08/10/big/img_783
|
1691 |
+
2002/08/20/big/img_132
|
1692 |
+
2003/01/14/big/img_370
|
1693 |
+
2002/07/20/big/img_720
|
1694 |
+
2002/08/03/big/img_144
|
1695 |
+
2002/08/20/big/img_538
|
1696 |
+
2002/08/01/big/img_1745
|
1697 |
+
2002/08/11/big/img_683
|
1698 |
+
2002/08/03/big/img_328
|
1699 |
+
2002/08/10/big/img_793
|
1700 |
+
2002/08/14/big/img_689
|
1701 |
+
2002/08/02/big/img_162
|
1702 |
+
2003/01/17/big/img_411
|
1703 |
+
2002/07/31/big/img_361
|
1704 |
+
2002/08/15/big/img_289
|
1705 |
+
2002/08/08/big/img_254
|
1706 |
+
2002/08/15/big/img_996
|
1707 |
+
2002/08/20/big/img_785
|
1708 |
+
2002/07/24/big/img_511
|
1709 |
+
2002/08/06/big/img_2614
|
1710 |
+
2002/08/29/big/img_18733
|
1711 |
+
2002/08/17/big/img_78
|
1712 |
+
2002/07/30/big/img_378
|
1713 |
+
2002/08/31/big/img_17947
|
1714 |
+
2002/08/26/big/img_88
|
1715 |
+
2002/07/30/big/img_558
|
1716 |
+
2002/08/02/big/img_67
|
1717 |
+
2003/01/14/big/img_325
|
1718 |
+
2002/07/29/big/img_1357
|
1719 |
+
2002/07/19/big/img_391
|
1720 |
+
2002/07/30/big/img_307
|
1721 |
+
2003/01/13/big/img_219
|
1722 |
+
2002/07/24/big/img_807
|
1723 |
+
2002/08/23/big/img_543
|
1724 |
+
2002/08/29/big/img_18620
|
1725 |
+
2002/07/22/big/img_769
|
1726 |
+
2002/08/26/big/img_503
|
1727 |
+
2002/07/30/big/img_78
|
1728 |
+
2002/08/14/big/img_1036
|
1729 |
+
2002/08/09/big/img_58
|
1730 |
+
2002/07/24/big/img_616
|
1731 |
+
2002/08/02/big/img_464
|
1732 |
+
2002/07/26/big/img_576
|
1733 |
+
2002/07/22/big/img_273
|
1734 |
+
2003/01/16/big/img_470
|
1735 |
+
2002/07/29/big/img_329
|
1736 |
+
2002/07/30/big/img_1086
|
1737 |
+
2002/07/31/big/img_353
|
1738 |
+
2002/09/02/big/img_15275
|
1739 |
+
2003/01/17/big/img_555
|
1740 |
+
2002/08/26/big/img_212
|
1741 |
+
2002/08/01/big/img_1692
|
1742 |
+
2003/01/15/big/img_600
|
1743 |
+
2002/07/29/big/img_825
|
1744 |
+
2002/08/08/big/img_68
|
1745 |
+
2002/08/10/big/img_719
|
1746 |
+
2002/07/31/big/img_636
|
1747 |
+
2002/07/29/big/img_325
|
1748 |
+
2002/07/21/big/img_515
|
1749 |
+
2002/07/22/big/img_705
|
1750 |
+
2003/01/13/big/img_818
|
1751 |
+
2002/08/09/big/img_486
|
1752 |
+
2002/08/22/big/img_141
|
1753 |
+
2002/07/22/big/img_303
|
1754 |
+
2002/08/09/big/img_393
|
1755 |
+
2002/07/29/big/img_963
|
1756 |
+
2002/08/02/big/img_1215
|
1757 |
+
2002/08/19/big/img_674
|
1758 |
+
2002/08/12/big/img_690
|
1759 |
+
2002/08/21/big/img_637
|
1760 |
+
2002/08/21/big/img_841
|
1761 |
+
2002/08/24/big/img_71
|
1762 |
+
2002/07/25/big/img_596
|
1763 |
+
2002/07/24/big/img_864
|
1764 |
+
2002/08/18/big/img_293
|
1765 |
+
2003/01/14/big/img_657
|
1766 |
+
2002/08/15/big/img_411
|
1767 |
+
2002/08/16/big/img_348
|
1768 |
+
2002/08/05/big/img_3157
|
1769 |
+
2002/07/20/big/img_663
|
1770 |
+
2003/01/13/big/img_654
|
1771 |
+
2003/01/16/big/img_433
|
1772 |
+
2002/08/30/big/img_18200
|
1773 |
+
2002/08/12/big/img_226
|
1774 |
+
2003/01/16/big/img_491
|
1775 |
+
2002/08/08/big/img_666
|
1776 |
+
2002/07/19/big/img_576
|
1777 |
+
2003/01/15/big/img_776
|
1778 |
+
2003/01/16/big/img_899
|
1779 |
+
2002/07/19/big/img_397
|
1780 |
+
2002/08/14/big/img_44
|
1781 |
+
2003/01/15/big/img_762
|
1782 |
+
2002/08/02/big/img_982
|
1783 |
+
2002/09/02/big/img_15234
|
1784 |
+
2002/08/17/big/img_556
|
1785 |
+
2002/08/21/big/img_410
|
1786 |
+
2002/08/21/big/img_386
|
1787 |
+
2002/07/19/big/img_690
|
1788 |
+
2002/08/05/big/img_3052
|
1789 |
+
2002/08/14/big/img_219
|
1790 |
+
2002/08/16/big/img_273
|
1791 |
+
2003/01/15/big/img_752
|
1792 |
+
2002/08/08/big/img_184
|
1793 |
+
2002/07/31/big/img_743
|
1794 |
+
2002/08/23/big/img_338
|
1795 |
+
2003/01/14/big/img_1055
|
1796 |
+
2002/08/05/big/img_3405
|
1797 |
+
2003/01/15/big/img_17
|
1798 |
+
2002/08/03/big/img_141
|
1799 |
+
2002/08/14/big/img_549
|
1800 |
+
2002/07/27/big/img_1034
|
1801 |
+
2002/07/31/big/img_932
|
1802 |
+
2002/08/30/big/img_18487
|
1803 |
+
2002/09/02/big/img_15814
|
1804 |
+
2002/08/01/big/img_2086
|
1805 |
+
2002/09/01/big/img_16535
|
1806 |
+
2002/07/22/big/img_500
|
1807 |
+
2003/01/13/big/img_400
|
1808 |
+
2002/08/25/big/img_607
|
1809 |
+
2002/08/30/big/img_18384
|
1810 |
+
2003/01/14/big/img_951
|
1811 |
+
2002/08/13/big/img_1150
|
1812 |
+
2002/08/08/big/img_1022
|
1813 |
+
2002/08/10/big/img_428
|
1814 |
+
2002/08/28/big/img_19242
|
1815 |
+
2002/08/05/big/img_3098
|
1816 |
+
2002/07/23/big/img_400
|
1817 |
+
2002/08/26/big/img_365
|
1818 |
+
2002/07/20/big/img_318
|
1819 |
+
2002/08/13/big/img_740
|
1820 |
+
2003/01/16/big/img_37
|
1821 |
+
2002/08/26/big/img_274
|
1822 |
+
2002/08/02/big/img_205
|
1823 |
+
2002/08/21/big/img_695
|
1824 |
+
2002/08/06/big/img_2289
|
1825 |
+
2002/08/20/big/img_794
|
1826 |
+
2002/08/18/big/img_438
|
1827 |
+
2002/08/07/big/img_1380
|
1828 |
+
2002/08/02/big/img_737
|
1829 |
+
2002/08/07/big/img_1651
|
1830 |
+
2002/08/15/big/img_1238
|
1831 |
+
2002/08/01/big/img_1681
|
1832 |
+
2002/08/06/big/img_3017
|
1833 |
+
2002/07/23/big/img_706
|
1834 |
+
2002/07/31/big/img_392
|
1835 |
+
2002/08/09/big/img_539
|
1836 |
+
2002/07/29/big/img_835
|
1837 |
+
2002/08/26/big/img_723
|
1838 |
+
2002/08/28/big/img_19235
|
1839 |
+
2003/01/16/big/img_353
|
1840 |
+
2002/08/10/big/img_150
|
1841 |
+
2002/08/29/big/img_19025
|
1842 |
+
2002/08/21/big/img_310
|
1843 |
+
2002/08/10/big/img_823
|
1844 |
+
2002/07/26/big/img_981
|
1845 |
+
2002/08/11/big/img_288
|
1846 |
+
2002/08/19/big/img_534
|
1847 |
+
2002/08/21/big/img_300
|
1848 |
+
2002/07/31/big/img_49
|
1849 |
+
2002/07/30/big/img_469
|
1850 |
+
2002/08/28/big/img_19197
|
1851 |
+
2002/08/25/big/img_205
|
1852 |
+
2002/08/10/big/img_390
|
1853 |
+
2002/08/23/big/img_291
|
1854 |
+
2002/08/26/big/img_230
|
1855 |
+
2002/08/18/big/img_76
|
1856 |
+
2002/07/23/big/img_409
|
1857 |
+
2002/08/14/big/img_1053
|
1858 |
+
2003/01/14/big/img_291
|
1859 |
+
2002/08/10/big/img_503
|
1860 |
+
2002/08/27/big/img_19928
|
1861 |
+
2002/08/03/big/img_563
|
1862 |
+
2002/08/17/big/img_250
|
1863 |
+
2002/08/06/big/img_2381
|
1864 |
+
2002/08/17/big/img_948
|
1865 |
+
2002/08/06/big/img_2710
|
1866 |
+
2002/07/22/big/img_696
|
1867 |
+
2002/07/31/big/img_670
|
1868 |
+
2002/08/12/big/img_594
|
1869 |
+
2002/07/29/big/img_624
|
1870 |
+
2003/01/17/big/img_934
|
1871 |
+
2002/08/03/big/img_584
|
1872 |
+
2002/08/22/big/img_1003
|
1873 |
+
2002/08/05/big/img_3396
|
1874 |
+
2003/01/13/big/img_570
|
1875 |
+
2002/08/02/big/img_219
|
1876 |
+
2002/09/02/big/img_15774
|
1877 |
+
2002/08/16/big/img_818
|
1878 |
+
2002/08/23/big/img_402
|
1879 |
+
2003/01/14/big/img_552
|
1880 |
+
2002/07/29/big/img_71
|
1881 |
+
2002/08/05/big/img_3592
|
1882 |
+
2002/08/16/big/img_80
|
1883 |
+
2002/07/27/big/img_672
|
1884 |
+
2003/01/13/big/img_470
|
1885 |
+
2003/01/16/big/img_702
|
1886 |
+
2002/09/01/big/img_16130
|
1887 |
+
2002/08/08/big/img_240
|
1888 |
+
2002/09/01/big/img_16338
|
1889 |
+
2002/07/26/big/img_312
|
1890 |
+
2003/01/14/big/img_538
|
1891 |
+
2002/07/20/big/img_695
|
1892 |
+
2002/08/30/big/img_18098
|
1893 |
+
2002/08/25/big/img_259
|
1894 |
+
2002/08/16/big/img_1042
|
1895 |
+
2002/08/09/big/img_837
|
1896 |
+
2002/08/31/big/img_17760
|
1897 |
+
2002/07/31/big/img_14
|
1898 |
+
2002/08/09/big/img_361
|
1899 |
+
2003/01/16/big/img_107
|
1900 |
+
2002/08/14/big/img_124
|
1901 |
+
2002/07/19/big/img_463
|
1902 |
+
2003/01/15/big/img_275
|
1903 |
+
2002/07/25/big/img_1151
|
1904 |
+
2002/07/29/big/img_1501
|
1905 |
+
2002/08/27/big/img_19889
|
1906 |
+
2002/08/29/big/img_18603
|
1907 |
+
2003/01/17/big/img_601
|
1908 |
+
2002/08/25/big/img_355
|
1909 |
+
2002/08/08/big/img_297
|
1910 |
+
2002/08/20/big/img_290
|
1911 |
+
2002/07/31/big/img_195
|
1912 |
+
2003/01/01/big/img_336
|
1913 |
+
2002/08/18/big/img_369
|
1914 |
+
2002/07/25/big/img_621
|
1915 |
+
2002/08/11/big/img_508
|
1916 |
+
2003/01/14/big/img_458
|
1917 |
+
2003/01/15/big/img_795
|
1918 |
+
2002/08/12/big/img_498
|
1919 |
+
2002/08/01/big/img_1734
|
1920 |
+
2002/08/02/big/img_246
|
1921 |
+
2002/08/16/big/img_565
|
1922 |
+
2002/08/11/big/img_475
|
1923 |
+
2002/08/22/big/img_408
|
1924 |
+
2002/07/28/big/img_78
|
1925 |
+
2002/07/21/big/img_81
|
1926 |
+
2003/01/14/big/img_697
|
1927 |
+
2002/08/14/big/img_661
|
1928 |
+
2002/08/15/big/img_507
|
1929 |
+
2002/08/19/big/img_55
|
1930 |
+
2002/07/22/big/img_152
|
1931 |
+
2003/01/14/big/img_470
|
1932 |
+
2002/08/03/big/img_379
|
1933 |
+
2002/08/22/big/img_506
|
1934 |
+
2003/01/16/big/img_966
|
1935 |
+
2002/08/18/big/img_698
|
1936 |
+
2002/08/24/big/img_528
|
1937 |
+
2002/08/23/big/img_10
|
1938 |
+
2002/08/01/big/img_1655
|
1939 |
+
2002/08/22/big/img_953
|
1940 |
+
2002/07/19/big/img_630
|
1941 |
+
2002/07/22/big/img_889
|
1942 |
+
2002/08/16/big/img_351
|
1943 |
+
2003/01/16/big/img_83
|
1944 |
+
2002/07/19/big/img_805
|
1945 |
+
2002/08/14/big/img_704
|
1946 |
+
2002/07/19/big/img_389
|
1947 |
+
2002/08/31/big/img_17765
|
1948 |
+
2002/07/29/big/img_606
|
1949 |
+
2003/01/17/big/img_939
|
1950 |
+
2002/09/02/big/img_15081
|
1951 |
+
2002/08/21/big/img_181
|
1952 |
+
2002/07/29/big/img_1321
|
1953 |
+
2002/07/21/big/img_497
|
1954 |
+
2002/07/20/big/img_539
|
1955 |
+
2002/08/24/big/img_119
|
1956 |
+
2002/08/01/big/img_1281
|
1957 |
+
2002/07/26/big/img_207
|
1958 |
+
2002/07/26/big/img_432
|
1959 |
+
2002/07/27/big/img_1006
|
1960 |
+
2002/08/05/big/img_3087
|
1961 |
+
2002/08/14/big/img_252
|
1962 |
+
2002/08/14/big/img_798
|
1963 |
+
2002/07/24/big/img_538
|
1964 |
+
2002/09/02/big/img_15507
|
1965 |
+
2002/08/08/big/img_901
|
1966 |
+
2003/01/14/big/img_557
|
1967 |
+
2002/08/07/big/img_1819
|
1968 |
+
2002/08/04/big/img_470
|
1969 |
+
2002/08/01/big/img_1504
|
1970 |
+
2002/08/16/big/img_1070
|
1971 |
+
2002/08/16/big/img_372
|
1972 |
+
2002/08/23/big/img_416
|
1973 |
+
2002/08/30/big/img_18208
|
1974 |
+
2002/08/01/big/img_2043
|
1975 |
+
2002/07/22/big/img_385
|
1976 |
+
2002/08/22/big/img_466
|
1977 |
+
2002/08/21/big/img_869
|
1978 |
+
2002/08/28/big/img_19429
|
1979 |
+
2002/08/02/big/img_770
|
1980 |
+
2002/07/23/big/img_433
|
1981 |
+
2003/01/14/big/img_13
|
1982 |
+
2002/07/27/big/img_953
|
1983 |
+
2002/09/02/big/img_15728
|
1984 |
+
2002/08/01/big/img_1361
|
1985 |
+
2002/08/29/big/img_18897
|
1986 |
+
2002/08/26/big/img_534
|
1987 |
+
2002/08/11/big/img_121
|
1988 |
+
2002/08/26/big/img_20130
|
1989 |
+
2002/07/31/big/img_363
|
1990 |
+
2002/08/13/big/img_978
|
1991 |
+
2002/07/25/big/img_835
|
1992 |
+
2002/08/02/big/img_906
|
1993 |
+
2003/01/14/big/img_548
|
1994 |
+
2002/07/30/big/img_80
|
1995 |
+
2002/07/26/big/img_982
|
1996 |
+
2003/01/16/big/img_99
|
1997 |
+
2002/08/19/big/img_362
|
1998 |
+
2002/08/24/big/img_376
|
1999 |
+
2002/08/07/big/img_1264
|
2000 |
+
2002/07/27/big/img_938
|
2001 |
+
2003/01/17/big/img_535
|
2002 |
+
2002/07/26/big/img_457
|
2003 |
+
2002/08/08/big/img_848
|
2004 |
+
2003/01/15/big/img_859
|
2005 |
+
2003/01/15/big/img_622
|
2006 |
+
2002/07/30/big/img_403
|
2007 |
+
2002/07/29/big/img_217
|
2008 |
+
2002/07/26/big/img_891
|
2009 |
+
2002/07/24/big/img_70
|
2010 |
+
2002/08/25/big/img_619
|
2011 |
+
2002/08/05/big/img_3375
|
2012 |
+
2002/08/01/big/img_2160
|
2013 |
+
2002/08/06/big/img_2227
|
2014 |
+
2003/01/14/big/img_117
|
2015 |
+
2002/08/14/big/img_227
|
2016 |
+
2002/08/13/big/img_565
|
2017 |
+
2002/08/19/big/img_625
|
2018 |
+
2002/08/03/big/img_812
|
2019 |
+
2002/07/24/big/img_41
|
2020 |
+
2002/08/16/big/img_235
|
2021 |
+
2002/07/29/big/img_759
|
2022 |
+
2002/07/21/big/img_433
|
2023 |
+
2002/07/29/big/img_190
|
2024 |
+
2003/01/16/big/img_435
|
2025 |
+
2003/01/13/big/img_708
|
2026 |
+
2002/07/30/big/img_57
|
2027 |
+
2002/08/22/big/img_162
|
2028 |
+
2003/01/01/big/img_558
|
2029 |
+
2003/01/15/big/img_604
|
2030 |
+
2002/08/16/big/img_935
|
2031 |
+
2002/08/20/big/img_394
|
2032 |
+
2002/07/28/big/img_465
|
2033 |
+
2002/09/02/big/img_15534
|
2034 |
+
2002/08/16/big/img_87
|
2035 |
+
2002/07/22/big/img_469
|
2036 |
+
2002/08/12/big/img_245
|
2037 |
+
2003/01/13/big/img_236
|
2038 |
+
2002/08/06/big/img_2736
|
2039 |
+
2002/08/03/big/img_348
|
2040 |
+
2003/01/14/big/img_218
|
2041 |
+
2002/07/26/big/img_232
|
2042 |
+
2003/01/15/big/img_244
|
2043 |
+
2002/07/25/big/img_1121
|
2044 |
+
2002/08/01/big/img_1484
|
2045 |
+
2002/07/26/big/img_541
|
2046 |
+
2002/08/07/big/img_1244
|
2047 |
+
2002/07/31/big/img_3
|
2048 |
+
2002/08/30/big/img_18437
|
2049 |
+
2002/08/29/big/img_19094
|
2050 |
+
2002/08/01/big/img_1355
|
2051 |
+
2002/08/19/big/img_338
|
2052 |
+
2002/07/19/big/img_255
|
2053 |
+
2002/07/21/big/img_76
|
2054 |
+
2002/08/25/big/img_199
|
2055 |
+
2002/08/12/big/img_740
|
2056 |
+
2002/07/30/big/img_852
|
2057 |
+
2002/08/15/big/img_599
|
2058 |
+
2002/08/23/big/img_254
|
2059 |
+
2002/08/19/big/img_125
|
2060 |
+
2002/07/24/big/img_2
|
2061 |
+
2002/08/04/big/img_145
|
2062 |
+
2002/08/05/big/img_3137
|
2063 |
+
2002/07/28/big/img_463
|
2064 |
+
2003/01/14/big/img_801
|
2065 |
+
2002/07/23/big/img_366
|
2066 |
+
2002/08/26/big/img_600
|
2067 |
+
2002/08/26/big/img_649
|
2068 |
+
2002/09/02/big/img_15849
|
2069 |
+
2002/07/26/big/img_248
|
2070 |
+
2003/01/13/big/img_200
|
2071 |
+
2002/08/07/big/img_1794
|
2072 |
+
2002/08/31/big/img_17270
|
2073 |
+
2002/08/23/big/img_608
|
2074 |
+
2003/01/13/big/img_837
|
2075 |
+
2002/08/23/big/img_581
|
2076 |
+
2002/08/20/big/img_754
|
2077 |
+
2002/08/18/big/img_183
|
2078 |
+
2002/08/20/big/img_328
|
2079 |
+
2002/07/22/big/img_494
|
2080 |
+
2002/07/29/big/img_399
|
2081 |
+
2002/08/28/big/img_19284
|
2082 |
+
2002/08/08/big/img_566
|
2083 |
+
2002/07/25/big/img_376
|
2084 |
+
2002/07/23/big/img_138
|
2085 |
+
2002/07/25/big/img_435
|
2086 |
+
2002/08/17/big/img_685
|
2087 |
+
2002/07/19/big/img_90
|
2088 |
+
2002/07/20/big/img_716
|
2089 |
+
2002/08/31/big/img_17458
|
2090 |
+
2002/08/26/big/img_461
|
2091 |
+
2002/07/25/big/img_355
|
2092 |
+
2002/08/06/big/img_2152
|
2093 |
+
2002/07/27/big/img_932
|
2094 |
+
2002/07/23/big/img_232
|
2095 |
+
2002/08/08/big/img_1020
|
2096 |
+
2002/07/31/big/img_366
|
2097 |
+
2002/08/06/big/img_2667
|
2098 |
+
2002/08/21/big/img_465
|
2099 |
+
2002/08/15/big/img_305
|
2100 |
+
2002/08/02/big/img_247
|
2101 |
+
2002/07/28/big/img_46
|
2102 |
+
2002/08/27/big/img_19922
|
2103 |
+
2002/08/23/big/img_643
|
2104 |
+
2003/01/13/big/img_624
|
2105 |
+
2002/08/23/big/img_625
|
2106 |
+
2002/08/05/big/img_3787
|
2107 |
+
2003/01/13/big/img_627
|
2108 |
+
2002/09/01/big/img_16381
|
2109 |
+
2002/08/05/big/img_3668
|
2110 |
+
2002/07/21/big/img_535
|
2111 |
+
2002/08/27/big/img_19680
|
2112 |
+
2002/07/22/big/img_413
|
2113 |
+
2002/07/29/big/img_481
|
2114 |
+
2003/01/15/big/img_496
|
2115 |
+
2002/07/23/big/img_701
|
2116 |
+
2002/08/29/big/img_18670
|
2117 |
+
2002/07/28/big/img_319
|
2118 |
+
2003/01/14/big/img_517
|
2119 |
+
2002/07/26/big/img_256
|
2120 |
+
2003/01/16/big/img_593
|
2121 |
+
2002/07/30/big/img_956
|
2122 |
+
2002/07/30/big/img_667
|
2123 |
+
2002/07/25/big/img_100
|
2124 |
+
2002/08/11/big/img_570
|
2125 |
+
2002/07/26/big/img_745
|
2126 |
+
2002/08/04/big/img_834
|
2127 |
+
2002/08/25/big/img_521
|
2128 |
+
2002/08/01/big/img_2148
|
2129 |
+
2002/09/02/big/img_15183
|
2130 |
+
2002/08/22/big/img_514
|
2131 |
+
2002/08/23/big/img_477
|
2132 |
+
2002/07/23/big/img_336
|
2133 |
+
2002/07/26/big/img_481
|
2134 |
+
2002/08/20/big/img_409
|
2135 |
+
2002/07/23/big/img_918
|
2136 |
+
2002/08/09/big/img_474
|
2137 |
+
2002/08/02/big/img_929
|
2138 |
+
2002/08/31/big/img_17932
|
2139 |
+
2002/08/19/big/img_161
|
2140 |
+
2002/08/09/big/img_667
|
2141 |
+
2002/07/31/big/img_805
|
2142 |
+
2002/09/02/big/img_15678
|
2143 |
+
2002/08/31/big/img_17509
|
2144 |
+
2002/08/29/big/img_18998
|
2145 |
+
2002/07/23/big/img_301
|
2146 |
+
2002/08/07/big/img_1612
|
2147 |
+
2002/08/06/big/img_2472
|
2148 |
+
2002/07/23/big/img_466
|
2149 |
+
2002/08/27/big/img_19634
|
2150 |
+
2003/01/16/big/img_16
|
2151 |
+
2002/08/14/big/img_193
|
2152 |
+
2002/08/21/big/img_340
|
2153 |
+
2002/08/27/big/img_19799
|
2154 |
+
2002/08/01/big/img_1345
|
2155 |
+
2002/08/07/big/img_1448
|
2156 |
+
2002/08/11/big/img_324
|
2157 |
+
2003/01/16/big/img_754
|
2158 |
+
2002/08/13/big/img_418
|
2159 |
+
2003/01/16/big/img_544
|
2160 |
+
2002/08/19/big/img_135
|
2161 |
+
2002/08/10/big/img_455
|
2162 |
+
2002/08/10/big/img_693
|
2163 |
+
2002/08/31/big/img_17967
|
2164 |
+
2002/08/28/big/img_19229
|
2165 |
+
2002/08/04/big/img_811
|
2166 |
+
2002/09/01/big/img_16225
|
2167 |
+
2003/01/16/big/img_428
|
2168 |
+
2002/09/02/big/img_15295
|
2169 |
+
2002/07/26/big/img_108
|
2170 |
+
2002/07/21/big/img_477
|
2171 |
+
2002/08/07/big/img_1354
|
2172 |
+
2002/08/23/big/img_246
|
2173 |
+
2002/08/16/big/img_652
|
2174 |
+
2002/07/27/big/img_553
|
2175 |
+
2002/07/31/big/img_346
|
2176 |
+
2002/08/04/big/img_537
|
2177 |
+
2002/08/08/big/img_498
|
2178 |
+
2002/08/29/big/img_18956
|
2179 |
+
2003/01/13/big/img_922
|
2180 |
+
2002/08/31/big/img_17425
|
2181 |
+
2002/07/26/big/img_438
|
2182 |
+
2002/08/19/big/img_185
|
2183 |
+
2003/01/16/big/img_33
|
2184 |
+
2002/08/10/big/img_252
|
2185 |
+
2002/07/29/big/img_598
|
2186 |
+
2002/08/27/big/img_19820
|
2187 |
+
2002/08/06/big/img_2664
|
2188 |
+
2002/08/20/big/img_705
|
2189 |
+
2003/01/14/big/img_816
|
2190 |
+
2002/08/03/big/img_552
|
2191 |
+
2002/07/25/big/img_561
|
2192 |
+
2002/07/25/big/img_934
|
2193 |
+
2002/08/01/big/img_1893
|
2194 |
+
2003/01/14/big/img_746
|
2195 |
+
2003/01/16/big/img_519
|
2196 |
+
2002/08/03/big/img_681
|
2197 |
+
2002/07/24/big/img_808
|
2198 |
+
2002/08/14/big/img_803
|
2199 |
+
2002/08/25/big/img_155
|
2200 |
+
2002/07/30/big/img_1107
|
2201 |
+
2002/08/29/big/img_18882
|
2202 |
+
2003/01/15/big/img_598
|
2203 |
+
2002/08/19/big/img_122
|
2204 |
+
2002/07/30/big/img_428
|
2205 |
+
2002/07/24/big/img_684
|
2206 |
+
2002/08/22/big/img_192
|
2207 |
+
2002/08/22/big/img_543
|
2208 |
+
2002/08/07/big/img_1318
|
2209 |
+
2002/08/18/big/img_25
|
2210 |
+
2002/07/26/big/img_583
|
2211 |
+
2002/07/20/big/img_464
|
2212 |
+
2002/08/19/big/img_664
|
2213 |
+
2002/08/24/big/img_861
|
2214 |
+
2002/09/01/big/img_16136
|
2215 |
+
2002/08/22/big/img_400
|
2216 |
+
2002/08/12/big/img_445
|
2217 |
+
2003/01/14/big/img_174
|
2218 |
+
2002/08/27/big/img_19677
|
2219 |
+
2002/08/31/big/img_17214
|
2220 |
+
2002/08/30/big/img_18175
|
2221 |
+
2003/01/17/big/img_402
|
2222 |
+
2002/08/06/big/img_2396
|
2223 |
+
2002/08/18/big/img_448
|
2224 |
+
2002/08/21/big/img_165
|
2225 |
+
2002/08/31/big/img_17609
|
2226 |
+
2003/01/01/big/img_151
|
2227 |
+
2002/08/26/big/img_372
|
2228 |
+
2002/09/02/big/img_15994
|
2229 |
+
2002/07/26/big/img_660
|
2230 |
+
2002/09/02/big/img_15197
|
2231 |
+
2002/07/29/big/img_258
|
2232 |
+
2002/08/30/big/img_18525
|
2233 |
+
2003/01/13/big/img_368
|
2234 |
+
2002/07/29/big/img_1538
|
2235 |
+
2002/07/21/big/img_787
|
2236 |
+
2002/08/18/big/img_152
|
2237 |
+
2002/08/06/big/img_2379
|
2238 |
+
2003/01/17/big/img_864
|
2239 |
+
2002/08/27/big/img_19998
|
2240 |
+
2002/08/01/big/img_1634
|
2241 |
+
2002/07/25/big/img_414
|
2242 |
+
2002/08/22/big/img_627
|
2243 |
+
2002/08/07/big/img_1669
|
2244 |
+
2002/08/16/big/img_1052
|
2245 |
+
2002/08/31/big/img_17796
|
2246 |
+
2002/08/18/big/img_199
|
2247 |
+
2002/09/02/big/img_15147
|
2248 |
+
2002/08/09/big/img_460
|
2249 |
+
2002/08/14/big/img_581
|
2250 |
+
2002/08/30/big/img_18286
|
2251 |
+
2002/07/26/big/img_337
|
2252 |
+
2002/08/18/big/img_589
|
2253 |
+
2003/01/14/big/img_866
|
2254 |
+
2002/07/20/big/img_624
|
2255 |
+
2002/08/01/big/img_1801
|
2256 |
+
2002/07/24/big/img_683
|
2257 |
+
2002/08/09/big/img_725
|
2258 |
+
2003/01/14/big/img_34
|
2259 |
+
2002/07/30/big/img_144
|
2260 |
+
2002/07/30/big/img_706
|
2261 |
+
2002/08/08/big/img_394
|
2262 |
+
2002/08/19/big/img_619
|
2263 |
+
2002/08/06/big/img_2703
|
2264 |
+
2002/08/29/big/img_19034
|
2265 |
+
2002/07/24/big/img_67
|
2266 |
+
2002/08/27/big/img_19841
|
2267 |
+
2002/08/19/big/img_427
|
2268 |
+
2003/01/14/big/img_333
|
2269 |
+
2002/09/01/big/img_16406
|
2270 |
+
2002/07/19/big/img_882
|
2271 |
+
2002/08/17/big/img_238
|
2272 |
+
2003/01/14/big/img_739
|
2273 |
+
2002/07/22/big/img_151
|
2274 |
+
2002/08/21/big/img_743
|
2275 |
+
2002/07/25/big/img_1048
|
2276 |
+
2002/07/30/big/img_395
|
2277 |
+
2003/01/13/big/img_584
|
2278 |
+
2002/08/13/big/img_742
|
2279 |
+
2002/08/13/big/img_1168
|
2280 |
+
2003/01/14/big/img_147
|
2281 |
+
2002/07/26/big/img_803
|
2282 |
+
2002/08/05/big/img_3298
|
2283 |
+
2002/08/07/big/img_1451
|
2284 |
+
2002/08/16/big/img_424
|
2285 |
+
2002/07/29/big/img_1069
|
2286 |
+
2002/09/01/big/img_16735
|
2287 |
+
2002/07/21/big/img_637
|
2288 |
+
2003/01/14/big/img_585
|
2289 |
+
2002/08/02/big/img_358
|
2290 |
+
2003/01/13/big/img_358
|
2291 |
+
2002/08/14/big/img_198
|
2292 |
+
2002/08/17/big/img_935
|
2293 |
+
2002/08/04/big/img_42
|
2294 |
+
2002/08/30/big/img_18245
|
2295 |
+
2002/07/25/big/img_158
|
2296 |
+
2002/08/22/big/img_744
|
2297 |
+
2002/08/06/big/img_2291
|
2298 |
+
2002/08/05/big/img_3044
|
2299 |
+
2002/07/30/big/img_272
|
2300 |
+
2002/08/23/big/img_641
|
2301 |
+
2002/07/24/big/img_797
|
2302 |
+
2002/07/30/big/img_392
|
2303 |
+
2003/01/14/big/img_447
|
2304 |
+
2002/07/31/big/img_898
|
2305 |
+
2002/08/06/big/img_2812
|
2306 |
+
2002/08/13/big/img_564
|
2307 |
+
2002/07/22/big/img_43
|
2308 |
+
2002/07/26/big/img_634
|
2309 |
+
2002/07/19/big/img_843
|
2310 |
+
2002/08/26/big/img_58
|
2311 |
+
2002/07/21/big/img_375
|
2312 |
+
2002/08/25/big/img_729
|
2313 |
+
2002/07/19/big/img_561
|
2314 |
+
2003/01/15/big/img_884
|
2315 |
+
2002/07/25/big/img_891
|
2316 |
+
2002/08/09/big/img_558
|
2317 |
+
2002/08/26/big/img_587
|
2318 |
+
2002/08/13/big/img_1146
|
2319 |
+
2002/09/02/big/img_15153
|
2320 |
+
2002/07/26/big/img_316
|
2321 |
+
2002/08/01/big/img_1940
|
2322 |
+
2002/08/26/big/img_90
|
2323 |
+
2003/01/13/big/img_347
|
2324 |
+
2002/07/25/big/img_520
|
2325 |
+
2002/08/29/big/img_18718
|
2326 |
+
2002/08/28/big/img_19219
|
2327 |
+
2002/08/13/big/img_375
|
2328 |
+
2002/07/20/big/img_719
|
2329 |
+
2002/08/31/big/img_17431
|
2330 |
+
2002/07/28/big/img_192
|
2331 |
+
2002/08/26/big/img_259
|
2332 |
+
2002/08/18/big/img_484
|
2333 |
+
2002/07/29/big/img_580
|
2334 |
+
2002/07/26/big/img_84
|
2335 |
+
2002/08/02/big/img_302
|
2336 |
+
2002/08/31/big/img_17007
|
2337 |
+
2003/01/15/big/img_543
|
2338 |
+
2002/09/01/big/img_16488
|
2339 |
+
2002/08/22/big/img_798
|
2340 |
+
2002/07/30/big/img_383
|
2341 |
+
2002/08/04/big/img_668
|
2342 |
+
2002/08/13/big/img_156
|
2343 |
+
2002/08/07/big/img_1353
|
2344 |
+
2002/07/25/big/img_281
|
2345 |
+
2003/01/14/big/img_587
|
2346 |
+
2003/01/15/big/img_524
|
2347 |
+
2002/08/19/big/img_726
|
2348 |
+
2002/08/21/big/img_709
|
2349 |
+
2002/08/26/big/img_465
|
2350 |
+
2002/07/31/big/img_658
|
2351 |
+
2002/08/28/big/img_19148
|
2352 |
+
2002/07/23/big/img_423
|
2353 |
+
2002/08/16/big/img_758
|
2354 |
+
2002/08/22/big/img_523
|
2355 |
+
2002/08/16/big/img_591
|
2356 |
+
2002/08/23/big/img_845
|
2357 |
+
2002/07/26/big/img_678
|
2358 |
+
2002/08/09/big/img_806
|
2359 |
+
2002/08/06/big/img_2369
|
2360 |
+
2002/07/29/big/img_457
|
2361 |
+
2002/07/19/big/img_278
|
2362 |
+
2002/08/30/big/img_18107
|
2363 |
+
2002/07/26/big/img_444
|
2364 |
+
2002/08/20/big/img_278
|
2365 |
+
2002/08/26/big/img_92
|
2366 |
+
2002/08/26/big/img_257
|
2367 |
+
2002/07/25/big/img_266
|
2368 |
+
2002/08/05/big/img_3829
|
2369 |
+
2002/07/26/big/img_757
|
2370 |
+
2002/07/29/big/img_1536
|
2371 |
+
2002/08/09/big/img_472
|
2372 |
+
2003/01/17/big/img_480
|
2373 |
+
2002/08/28/big/img_19355
|
2374 |
+
2002/07/26/big/img_97
|
2375 |
+
2002/08/06/big/img_2503
|
2376 |
+
2002/07/19/big/img_254
|
2377 |
+
2002/08/01/big/img_1470
|
2378 |
+
2002/08/21/big/img_42
|
2379 |
+
2002/08/20/big/img_217
|
2380 |
+
2002/08/06/big/img_2459
|
2381 |
+
2002/07/19/big/img_552
|
2382 |
+
2002/08/13/big/img_717
|
2383 |
+
2002/08/12/big/img_586
|
2384 |
+
2002/08/20/big/img_411
|
2385 |
+
2003/01/13/big/img_768
|
2386 |
+
2002/08/07/big/img_1747
|
2387 |
+
2002/08/15/big/img_385
|
2388 |
+
2002/08/01/big/img_1648
|
2389 |
+
2002/08/15/big/img_311
|
2390 |
+
2002/08/21/big/img_95
|
2391 |
+
2002/08/09/big/img_108
|
2392 |
+
2002/08/21/big/img_398
|
2393 |
+
2002/08/17/big/img_340
|
2394 |
+
2002/08/14/big/img_474
|
2395 |
+
2002/08/13/big/img_294
|
2396 |
+
2002/08/24/big/img_840
|
2397 |
+
2002/08/09/big/img_808
|
2398 |
+
2002/08/23/big/img_491
|
2399 |
+
2002/07/28/big/img_33
|
2400 |
+
2003/01/13/big/img_664
|
2401 |
+
2002/08/02/big/img_261
|
2402 |
+
2002/08/09/big/img_591
|
2403 |
+
2002/07/26/big/img_309
|
2404 |
+
2003/01/14/big/img_372
|
2405 |
+
2002/08/19/big/img_581
|
2406 |
+
2002/08/19/big/img_168
|
2407 |
+
2002/08/26/big/img_422
|
2408 |
+
2002/07/24/big/img_106
|
2409 |
+
2002/08/01/big/img_1936
|
2410 |
+
2002/08/05/big/img_3764
|
2411 |
+
2002/08/21/big/img_266
|
2412 |
+
2002/08/31/big/img_17968
|
2413 |
+
2002/08/01/big/img_1941
|
2414 |
+
2002/08/15/big/img_550
|
2415 |
+
2002/08/14/big/img_13
|
2416 |
+
2002/07/30/big/img_171
|
2417 |
+
2003/01/13/big/img_490
|
2418 |
+
2002/07/25/big/img_427
|
2419 |
+
2002/07/19/big/img_770
|
2420 |
+
2002/08/12/big/img_759
|
2421 |
+
2003/01/15/big/img_1360
|
2422 |
+
2002/08/05/big/img_3692
|
2423 |
+
2003/01/16/big/img_30
|
2424 |
+
2002/07/25/big/img_1026
|
2425 |
+
2002/07/22/big/img_288
|
2426 |
+
2002/08/29/big/img_18801
|
2427 |
+
2002/07/24/big/img_793
|
2428 |
+
2002/08/13/big/img_178
|
2429 |
+
2002/08/06/big/img_2322
|
2430 |
+
2003/01/14/big/img_560
|
2431 |
+
2002/08/18/big/img_408
|
2432 |
+
2003/01/16/big/img_915
|
2433 |
+
2003/01/16/big/img_679
|
2434 |
+
2002/08/07/big/img_1552
|
2435 |
+
2002/08/29/big/img_19050
|
2436 |
+
2002/08/01/big/img_2172
|
2437 |
+
2002/07/31/big/img_30
|
2438 |
+
2002/07/30/big/img_1019
|
2439 |
+
2002/07/30/big/img_587
|
2440 |
+
2003/01/13/big/img_773
|
2441 |
+
2002/07/30/big/img_410
|
2442 |
+
2002/07/28/big/img_65
|
2443 |
+
2002/08/05/big/img_3138
|
2444 |
+
2002/07/23/big/img_541
|
2445 |
+
2002/08/22/big/img_963
|
2446 |
+
2002/07/27/big/img_657
|
2447 |
+
2002/07/30/big/img_1051
|
2448 |
+
2003/01/16/big/img_150
|
2449 |
+
2002/07/31/big/img_519
|
2450 |
+
2002/08/01/big/img_1961
|
2451 |
+
2002/08/05/big/img_3752
|
2452 |
+
2002/07/23/big/img_631
|
2453 |
+
2003/01/14/big/img_237
|
2454 |
+
2002/07/28/big/img_21
|
2455 |
+
2002/07/22/big/img_813
|
2456 |
+
2002/08/05/big/img_3563
|
2457 |
+
2003/01/17/big/img_620
|
2458 |
+
2002/07/19/big/img_523
|
2459 |
+
2002/07/30/big/img_904
|
2460 |
+
2002/08/29/big/img_18642
|
2461 |
+
2002/08/11/big/img_492
|
2462 |
+
2002/08/01/big/img_2130
|
2463 |
+
2002/07/25/big/img_618
|
2464 |
+
2002/08/17/big/img_305
|
2465 |
+
2003/01/16/big/img_520
|
2466 |
+
2002/07/26/big/img_495
|
2467 |
+
2002/08/17/big/img_164
|
2468 |
+
2002/08/03/big/img_440
|
2469 |
+
2002/07/24/big/img_441
|
2470 |
+
2002/08/06/big/img_2146
|
2471 |
+
2002/08/11/big/img_558
|
2472 |
+
2002/08/02/big/img_545
|
2473 |
+
2002/08/31/big/img_18090
|
2474 |
+
2003/01/01/big/img_136
|
2475 |
+
2002/07/25/big/img_1099
|
2476 |
+
2003/01/13/big/img_728
|
2477 |
+
2003/01/16/big/img_197
|
2478 |
+
2002/07/26/big/img_651
|
2479 |
+
2002/08/11/big/img_676
|
2480 |
+
2003/01/15/big/img_10
|
2481 |
+
2002/08/21/big/img_250
|
2482 |
+
2002/08/14/big/img_325
|
2483 |
+
2002/08/04/big/img_390
|
2484 |
+
2002/07/24/big/img_554
|
2485 |
+
2003/01/16/big/img_333
|
2486 |
+
2002/07/31/big/img_922
|
2487 |
+
2002/09/02/big/img_15586
|
2488 |
+
2003/01/16/big/img_184
|
2489 |
+
2002/07/22/big/img_766
|
2490 |
+
2002/07/21/big/img_608
|
2491 |
+
2002/08/07/big/img_1578
|
2492 |
+
2002/08/17/big/img_961
|
2493 |
+
2002/07/27/big/img_324
|
2494 |
+
2002/08/05/big/img_3765
|
2495 |
+
2002/08/23/big/img_462
|
2496 |
+
2003/01/16/big/img_382
|
2497 |
+
2002/08/27/big/img_19838
|
2498 |
+
2002/08/01/big/img_1505
|
2499 |
+
2002/08/21/big/img_662
|
2500 |
+
2002/08/14/big/img_605
|
2501 |
+
2002/08/19/big/img_816
|
2502 |
+
2002/07/29/big/img_136
|
2503 |
+
2002/08/20/big/img_719
|
2504 |
+
2002/08/06/big/img_2826
|
2505 |
+
2002/08/10/big/img_630
|
2506 |
+
2003/01/17/big/img_973
|
2507 |
+
2002/08/14/big/img_116
|
2508 |
+
2002/08/02/big/img_666
|
2509 |
+
2002/08/21/big/img_710
|
2510 |
+
2002/08/05/big/img_55
|
2511 |
+
2002/07/31/big/img_229
|
2512 |
+
2002/08/01/big/img_1549
|
2513 |
+
2002/07/23/big/img_432
|
2514 |
+
2002/07/21/big/img_430
|
2515 |
+
2002/08/21/big/img_549
|
2516 |
+
2002/08/08/big/img_985
|
2517 |
+
2002/07/20/big/img_610
|
2518 |
+
2002/07/23/big/img_978
|
2519 |
+
2002/08/23/big/img_219
|
2520 |
+
2002/07/25/big/img_175
|
2521 |
+
2003/01/15/big/img_230
|
2522 |
+
2002/08/23/big/img_385
|
2523 |
+
2002/07/31/big/img_879
|
2524 |
+
2002/08/12/big/img_495
|
2525 |
+
2002/08/22/big/img_499
|
2526 |
+
2002/08/30/big/img_18322
|
2527 |
+
2002/08/15/big/img_795
|
2528 |
+
2002/08/13/big/img_835
|
2529 |
+
2003/01/17/big/img_930
|
2530 |
+
2002/07/30/big/img_873
|
2531 |
+
2002/08/11/big/img_257
|
2532 |
+
2002/07/31/big/img_593
|
2533 |
+
2002/08/21/big/img_916
|
2534 |
+
2003/01/13/big/img_814
|
2535 |
+
2002/07/25/big/img_722
|
2536 |
+
2002/08/16/big/img_379
|
2537 |
+
2002/07/31/big/img_497
|
2538 |
+
2002/07/22/big/img_602
|
2539 |
+
2002/08/21/big/img_642
|
2540 |
+
2002/08/21/big/img_614
|
2541 |
+
2002/08/23/big/img_482
|
2542 |
+
2002/07/29/big/img_603
|
2543 |
+
2002/08/13/big/img_705
|
2544 |
+
2002/07/23/big/img_833
|
2545 |
+
2003/01/14/big/img_511
|
2546 |
+
2002/07/24/big/img_376
|
2547 |
+
2002/08/17/big/img_1030
|
2548 |
+
2002/08/05/big/img_3576
|
2549 |
+
2002/08/16/big/img_540
|
2550 |
+
2002/07/22/big/img_630
|
2551 |
+
2002/08/10/big/img_180
|
2552 |
+
2002/08/14/big/img_905
|
2553 |
+
2002/08/29/big/img_18777
|
2554 |
+
2002/08/22/big/img_693
|
2555 |
+
2003/01/16/big/img_933
|
2556 |
+
2002/08/20/big/img_555
|
2557 |
+
2002/08/15/big/img_549
|
2558 |
+
2003/01/14/big/img_830
|
2559 |
+
2003/01/16/big/img_64
|
2560 |
+
2002/08/27/big/img_19670
|
2561 |
+
2002/08/22/big/img_729
|
2562 |
+
2002/07/27/big/img_981
|
2563 |
+
2002/08/09/big/img_458
|
2564 |
+
2003/01/17/big/img_884
|
2565 |
+
2002/07/25/big/img_639
|
2566 |
+
2002/08/31/big/img_18008
|
2567 |
+
2002/08/22/big/img_249
|
2568 |
+
2002/08/17/big/img_971
|
2569 |
+
2002/08/04/big/img_308
|
2570 |
+
2002/07/28/big/img_362
|
2571 |
+
2002/08/12/big/img_142
|
2572 |
+
2002/08/26/big/img_61
|
2573 |
+
2002/08/14/big/img_422
|
2574 |
+
2002/07/19/big/img_607
|
2575 |
+
2003/01/15/big/img_717
|
2576 |
+
2002/08/01/big/img_1475
|
2577 |
+
2002/08/29/big/img_19061
|
2578 |
+
2003/01/01/big/img_346
|
2579 |
+
2002/07/20/big/img_315
|
2580 |
+
2003/01/15/big/img_756
|
2581 |
+
2002/08/15/big/img_879
|
2582 |
+
2002/08/08/big/img_615
|
2583 |
+
2003/01/13/big/img_431
|
2584 |
+
2002/08/05/big/img_3233
|
2585 |
+
2002/08/24/big/img_526
|
2586 |
+
2003/01/13/big/img_717
|
2587 |
+
2002/09/01/big/img_16408
|
2588 |
+
2002/07/22/big/img_217
|
2589 |
+
2002/07/31/big/img_960
|
2590 |
+
2002/08/21/big/img_610
|
2591 |
+
2002/08/05/big/img_3753
|
2592 |
+
2002/08/03/big/img_151
|
2593 |
+
2002/08/21/big/img_267
|
2594 |
+
2002/08/01/big/img_2175
|
2595 |
+
2002/08/04/big/img_556
|
2596 |
+
2002/08/21/big/img_527
|
2597 |
+
2002/09/02/big/img_15800
|
2598 |
+
2002/07/27/big/img_156
|
2599 |
+
2002/07/20/big/img_590
|
2600 |
+
2002/08/15/big/img_700
|
2601 |
+
2002/08/08/big/img_444
|
2602 |
+
2002/07/25/big/img_94
|
2603 |
+
2002/07/24/big/img_778
|
2604 |
+
2002/08/14/big/img_694
|
2605 |
+
2002/07/20/big/img_666
|
2606 |
+
2002/08/02/big/img_200
|
2607 |
+
2002/08/02/big/img_578
|
2608 |
+
2003/01/17/big/img_332
|
2609 |
+
2002/09/01/big/img_16352
|
2610 |
+
2002/08/27/big/img_19668
|
2611 |
+
2002/07/23/big/img_823
|
2612 |
+
2002/08/13/big/img_431
|
2613 |
+
2003/01/16/big/img_463
|
2614 |
+
2002/08/27/big/img_19711
|
2615 |
+
2002/08/23/big/img_154
|
2616 |
+
2002/07/31/big/img_360
|
2617 |
+
2002/08/23/big/img_555
|
2618 |
+
2002/08/10/big/img_561
|
2619 |
+
2003/01/14/big/img_550
|
2620 |
+
2002/08/07/big/img_1370
|
2621 |
+
2002/07/30/big/img_1184
|
2622 |
+
2002/08/01/big/img_1445
|
2623 |
+
2002/08/23/big/img_22
|
2624 |
+
2002/07/30/big/img_606
|
2625 |
+
2003/01/17/big/img_271
|
2626 |
+
2002/08/31/big/img_17316
|
2627 |
+
2002/08/16/big/img_973
|
2628 |
+
2002/07/26/big/img_77
|
2629 |
+
2002/07/20/big/img_788
|
2630 |
+
2002/08/06/big/img_2426
|
2631 |
+
2002/08/07/big/img_1498
|
2632 |
+
2002/08/16/big/img_358
|
2633 |
+
2002/08/06/big/img_2851
|
2634 |
+
2002/08/12/big/img_359
|
2635 |
+
2002/08/01/big/img_1521
|
2636 |
+
2002/08/02/big/img_709
|
2637 |
+
2002/08/20/big/img_935
|
2638 |
+
2002/08/12/big/img_188
|
2639 |
+
2002/08/24/big/img_411
|
2640 |
+
2002/08/22/big/img_680
|
2641 |
+
2002/08/06/big/img_2480
|
2642 |
+
2002/07/20/big/img_627
|
2643 |
+
2002/07/30/big/img_214
|
2644 |
+
2002/07/25/big/img_354
|
2645 |
+
2002/08/02/big/img_636
|
2646 |
+
2003/01/15/big/img_661
|
2647 |
+
2002/08/07/big/img_1327
|
2648 |
+
2002/08/01/big/img_2108
|
2649 |
+
2002/08/31/big/img_17919
|
2650 |
+
2002/08/29/big/img_18768
|
2651 |
+
2002/08/05/big/img_3840
|
2652 |
+
2002/07/26/big/img_242
|
2653 |
+
2003/01/14/big/img_451
|
2654 |
+
2002/08/20/big/img_923
|
2655 |
+
2002/08/27/big/img_19908
|
2656 |
+
2002/08/16/big/img_282
|
2657 |
+
2002/08/19/big/img_440
|
2658 |
+
2003/01/01/big/img_230
|
2659 |
+
2002/08/08/big/img_212
|
2660 |
+
2002/07/20/big/img_443
|
2661 |
+
2002/08/25/big/img_635
|
2662 |
+
2003/01/13/big/img_1169
|
2663 |
+
2002/07/26/big/img_998
|
2664 |
+
2002/08/15/big/img_995
|
2665 |
+
2002/08/06/big/img_3002
|
2666 |
+
2002/07/29/big/img_460
|
2667 |
+
2003/01/14/big/img_925
|
2668 |
+
2002/07/23/big/img_539
|
2669 |
+
2002/08/16/big/img_694
|
2670 |
+
2003/01/13/big/img_459
|
2671 |
+
2002/07/23/big/img_249
|
2672 |
+
2002/08/20/big/img_539
|
2673 |
+
2002/08/04/big/img_186
|
2674 |
+
2002/08/26/big/img_264
|
2675 |
+
2002/07/22/big/img_704
|
2676 |
+
2002/08/25/big/img_277
|
2677 |
+
2002/08/22/big/img_988
|
2678 |
+
2002/07/29/big/img_504
|
2679 |
+
2002/08/05/big/img_3600
|
2680 |
+
2002/08/30/big/img_18380
|
2681 |
+
2003/01/14/big/img_937
|
2682 |
+
2002/08/21/big/img_254
|
2683 |
+
2002/08/10/big/img_130
|
2684 |
+
2002/08/20/big/img_339
|
2685 |
+
2003/01/14/big/img_428
|
2686 |
+
2002/08/20/big/img_889
|
2687 |
+
2002/08/31/big/img_17637
|
2688 |
+
2002/07/26/big/img_644
|
2689 |
+
2002/09/01/big/img_16776
|
2690 |
+
2002/08/06/big/img_2239
|
2691 |
+
2002/08/06/big/img_2646
|
2692 |
+
2003/01/13/big/img_491
|
2693 |
+
2002/08/10/big/img_579
|
2694 |
+
2002/08/21/big/img_713
|
2695 |
+
2002/08/22/big/img_482
|
2696 |
+
2002/07/22/big/img_167
|
2697 |
+
2002/07/24/big/img_539
|
2698 |
+
2002/08/14/big/img_721
|
2699 |
+
2002/07/25/big/img_389
|
2700 |
+
2002/09/01/big/img_16591
|
2701 |
+
2002/08/13/big/img_543
|
2702 |
+
2003/01/14/big/img_432
|
2703 |
+
2002/08/09/big/img_287
|
2704 |
+
2002/07/26/big/img_126
|
2705 |
+
2002/08/23/big/img_412
|
2706 |
+
2002/08/15/big/img_1034
|
2707 |
+
2002/08/28/big/img_19485
|
2708 |
+
2002/07/31/big/img_236
|
2709 |
+
2002/07/30/big/img_523
|
2710 |
+
2002/07/19/big/img_141
|
2711 |
+
2003/01/17/big/img_957
|
2712 |
+
2002/08/04/big/img_81
|
2713 |
+
2002/07/25/big/img_206
|
2714 |
+
2002/08/15/big/img_716
|
2715 |
+
2002/08/13/big/img_403
|
2716 |
+
2002/08/15/big/img_685
|
2717 |
+
2002/07/26/big/img_884
|
2718 |
+
2002/07/19/big/img_499
|
2719 |
+
2002/07/23/big/img_772
|
2720 |
+
2002/07/27/big/img_752
|
2721 |
+
2003/01/14/big/img_493
|
2722 |
+
2002/08/25/big/img_664
|
2723 |
+
2002/07/31/big/img_334
|
2724 |
+
2002/08/26/big/img_678
|
2725 |
+
2002/09/01/big/img_16541
|
2726 |
+
2003/01/14/big/img_347
|
2727 |
+
2002/07/23/big/img_187
|
2728 |
+
2002/07/30/big/img_1163
|
2729 |
+
2002/08/05/big/img_35
|
2730 |
+
2002/08/22/big/img_944
|
2731 |
+
2002/08/07/big/img_1239
|
2732 |
+
2002/07/29/big/img_1215
|
2733 |
+
2002/08/03/big/img_312
|
2734 |
+
2002/08/05/big/img_3523
|
2735 |
+
2002/07/29/big/img_218
|
2736 |
+
2002/08/13/big/img_672
|
2737 |
+
2002/08/16/big/img_205
|
2738 |
+
2002/08/17/big/img_594
|
2739 |
+
2002/07/29/big/img_1411
|
2740 |
+
2002/07/30/big/img_942
|
2741 |
+
2003/01/16/big/img_312
|
2742 |
+
2002/08/08/big/img_312
|
2743 |
+
2002/07/25/big/img_15
|
2744 |
+
2002/08/09/big/img_839
|
2745 |
+
2002/08/01/big/img_2069
|
2746 |
+
2002/08/31/big/img_17512
|
2747 |
+
2002/08/01/big/img_3
|
2748 |
+
2002/07/31/big/img_320
|
2749 |
+
2003/01/15/big/img_1265
|
2750 |
+
2002/08/14/big/img_563
|
2751 |
+
2002/07/31/big/img_167
|
2752 |
+
2002/08/20/big/img_374
|
2753 |
+
2002/08/13/big/img_406
|
2754 |
+
2002/08/08/big/img_625
|
2755 |
+
2002/08/02/big/img_314
|
2756 |
+
2002/08/27/big/img_19964
|
2757 |
+
2002/09/01/big/img_16670
|
2758 |
+
2002/07/31/big/img_599
|
2759 |
+
2002/08/29/big/img_18906
|
2760 |
+
2002/07/24/big/img_373
|
2761 |
+
2002/07/26/big/img_513
|
2762 |
+
2002/09/02/big/img_15497
|
2763 |
+
2002/08/19/big/img_117
|
2764 |
+
2003/01/01/big/img_158
|
2765 |
+
2002/08/24/big/img_178
|
2766 |
+
2003/01/13/big/img_935
|
2767 |
+
2002/08/13/big/img_609
|
2768 |
+
2002/08/30/big/img_18341
|
2769 |
+
2002/08/25/big/img_674
|
2770 |
+
2003/01/13/big/img_209
|
2771 |
+
2002/08/13/big/img_258
|
2772 |
+
2002/08/05/big/img_3543
|
2773 |
+
2002/08/07/big/img_1970
|
2774 |
+
2002/08/06/big/img_3004
|
2775 |
+
2003/01/17/big/img_487
|
2776 |
+
2002/08/24/big/img_873
|
2777 |
+
2002/08/29/big/img_18730
|
2778 |
+
2002/08/09/big/img_375
|
2779 |
+
2003/01/16/big/img_751
|
2780 |
+
2002/08/02/big/img_603
|
2781 |
+
2002/08/19/big/img_325
|
2782 |
+
2002/09/01/big/img_16420
|
2783 |
+
2002/08/05/big/img_3633
|
2784 |
+
2002/08/21/big/img_516
|
2785 |
+
2002/07/19/big/img_501
|
2786 |
+
2002/07/26/big/img_688
|
2787 |
+
2002/07/24/big/img_256
|
2788 |
+
2002/07/25/big/img_438
|
2789 |
+
2002/07/31/big/img_1017
|
2790 |
+
2002/08/22/big/img_512
|
2791 |
+
2002/07/21/big/img_543
|
2792 |
+
2002/08/08/big/img_223
|
2793 |
+
2002/08/19/big/img_189
|
2794 |
+
2002/08/12/big/img_630
|
2795 |
+
2002/07/30/big/img_958
|
2796 |
+
2002/07/28/big/img_208
|
2797 |
+
2002/08/31/big/img_17691
|
2798 |
+
2002/07/22/big/img_542
|
2799 |
+
2002/07/19/big/img_741
|
2800 |
+
2002/07/19/big/img_158
|
2801 |
+
2002/08/15/big/img_399
|
2802 |
+
2002/08/01/big/img_2159
|
2803 |
+
2002/08/14/big/img_455
|
2804 |
+
2002/08/17/big/img_1011
|
2805 |
+
2002/08/26/big/img_744
|
2806 |
+
2002/08/12/big/img_624
|
2807 |
+
2003/01/17/big/img_821
|
2808 |
+
2002/08/16/big/img_980
|
2809 |
+
2002/07/28/big/img_281
|
2810 |
+
2002/07/25/big/img_171
|
2811 |
+
2002/08/03/big/img_116
|
2812 |
+
2002/07/22/big/img_467
|
2813 |
+
2002/07/31/big/img_750
|
2814 |
+
2002/07/26/big/img_435
|
2815 |
+
2002/07/19/big/img_822
|
2816 |
+
2002/08/13/big/img_626
|
2817 |
+
2002/08/11/big/img_344
|
2818 |
+
2002/08/02/big/img_473
|
2819 |
+
2002/09/01/big/img_16817
|
2820 |
+
2002/08/01/big/img_1275
|
2821 |
+
2002/08/28/big/img_19270
|
2822 |
+
2002/07/23/big/img_607
|
2823 |
+
2002/08/09/big/img_316
|
2824 |
+
2002/07/29/big/img_626
|
2825 |
+
2002/07/24/big/img_824
|
2826 |
+
2002/07/22/big/img_342
|
2827 |
+
2002/08/08/big/img_794
|
2828 |
+
2002/08/07/big/img_1209
|
2829 |
+
2002/07/19/big/img_18
|
2830 |
+
2002/08/25/big/img_634
|
2831 |
+
2002/07/24/big/img_730
|
2832 |
+
2003/01/17/big/img_356
|
2833 |
+
2002/07/23/big/img_305
|
2834 |
+
2002/07/30/big/img_453
|
2835 |
+
2003/01/13/big/img_972
|
2836 |
+
2002/08/06/big/img_2610
|
2837 |
+
2002/08/29/big/img_18920
|
2838 |
+
2002/07/31/big/img_123
|
2839 |
+
2002/07/26/big/img_979
|
2840 |
+
2002/08/24/big/img_635
|
2841 |
+
2002/08/05/big/img_3704
|
2842 |
+
2002/08/07/big/img_1358
|
2843 |
+
2002/07/22/big/img_306
|
2844 |
+
2002/08/13/big/img_619
|
2845 |
+
2002/08/02/big/img_366
|
data/__init__.py
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
from .wider_face import WiderFaceDetection, detection_collate
|
2 |
+
from .data_augment import *
|
3 |
+
from .config import *
|
data/__pycache__/__init__.cpython-39.pyc
ADDED
Binary file (291 Bytes). View file
|
|
data/__pycache__/config.cpython-39.pyc
ADDED
Binary file (782 Bytes). View file
|
|
data/__pycache__/data_augment.cpython-39.pyc
ADDED
Binary file (5.76 kB). View file
|
|
data/__pycache__/wider_face.cpython-39.pyc
ADDED
Binary file (3.19 kB). View file
|
|
data/config.py
ADDED
@@ -0,0 +1,42 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# config.py
#
# Detector configurations (anchor layout, training schedule, backbone taps).
# Shared keys:
#   min_sizes     anchor box sizes per feature level
#   steps         strides of the three feature maps used for anchors
#   variance      box-encoding variances (presumably [center, size] — confirm
#                 against the box-coding utilities)
#   clip          whether prior boxes are clipped to [0, 1]
#   loc_weight    weight of the localisation loss term
#   decay1/decay2 epoch milestones (presumably LR decay points — confirm
#                 against the training script)
#   return_layers backbone sub-modules tapped for the feature pyramid
#   in_channel / out_channel  channel widths feeding / leaving the FPN

# MobileNet-0.25 backbone configuration.
cfg_mnet = {
    'name': 'mobilenet0.25',
    'min_sizes': [[16, 32], [64, 128], [256, 512]],
    'steps': [8, 16, 32],
    'variance': [0.1, 0.2],
    'clip': False,
    'loc_weight': 2.0,
    'gpu_train': True,
    'batch_size': 32,
    'ngpu': 1,
    'epoch': 250,
    'decay1': 190,
    'decay2': 220,
    'image_size': 640,  # square training resolution
    'pretrain': True,
    'return_layers': {'stage1': 1, 'stage2': 2, 'stage3': 3},
    'in_channel': 32,
    'out_channel': 64
}

# ResNet-50 backbone configuration (larger images, multi-GPU defaults).
cfg_re50 = {
    'name': 'Resnet50',
    'min_sizes': [[16, 32], [64, 128], [256, 512]],
    'steps': [8, 16, 32],
    'variance': [0.1, 0.2],
    'clip': False,
    'loc_weight': 2.0,
    'gpu_train': True,
    'batch_size': 24,
    'ngpu': 4,
    'epoch': 100,
    'decay1': 70,
    'decay2': 90,
    'image_size': 840,
    'pretrain': True,
    'return_layers': {'layer2': 1, 'layer3': 2, 'layer4': 3},
    'in_channel': 256,
    'out_channel': 256
}
|
42 |
+
|
data/data_augment.py
ADDED
@@ -0,0 +1,237 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import cv2
|
2 |
+
import numpy as np
|
3 |
+
import random
|
4 |
+
from utils.box_utils import matrix_iof
|
5 |
+
|
6 |
+
|
7 |
+
def _crop(image, boxes, labels, landm, img_dim):
    """Random square crop that retains at least one ground-truth face.

    Tries up to 250 random square ROIs whose side is a random fraction of the
    image's short side.  A candidate is kept only if every surviving box's
    center lies strictly inside the ROI and at least one box survives the
    size filter below.

    Returns:
        (image, boxes, labels, landmarks, pad_image_flag) — the cropped data
        on success, or the untouched inputs with ``pad_image_flag=True`` when
        no valid crop was found (the caller is then expected to pad instead).
    """
    height, width, _ = image.shape
    pad_image_flag = True

    for _ in range(250):
        """
        if random.uniform(0, 1) <= 0.2:
            scale = 1.0
        else:
            scale = random.uniform(0.3, 1.0)
        """
        # Crop scale drawn from a fixed palette rather than a continuous range.
        PRE_SCALES = [0.3, 0.45, 0.6, 0.8, 1.0]
        scale = random.choice(PRE_SCALES)
        short_side = min(width, height)
        w = int(scale * short_side)
        h = w  # square crop

        # Random top-left corner such that the ROI stays inside the image.
        if width == w:
            l = 0
        else:
            l = random.randrange(width - w)
        if height == h:
            t = 0
        else:
            t = random.randrange(height - h)
        roi = np.array((l, t, l + w, t + h))

        # matrix_iof: intersection-over-foreground of each box vs. the ROI;
        # require at least one box fully contained (iof >= 1).
        value = matrix_iof(boxes, roi[np.newaxis])
        flag = (value >= 1)
        if not flag.any():
            continue

        # Keep only boxes whose center falls strictly inside the ROI.
        centers = (boxes[:, :2] + boxes[:, 2:]) / 2
        mask_a = np.logical_and(roi[:2] < centers, centers < roi[2:]).all(axis=1)
        boxes_t = boxes[mask_a].copy()
        labels_t = labels[mask_a].copy()
        landms_t = landm[mask_a].copy()
        landms_t = landms_t.reshape([-1, 5, 2])

        if boxes_t.shape[0] == 0:
            continue

        image_t = image[roi[1]:roi[3], roi[0]:roi[2]]

        # Clip boxes to the ROI, then shift into crop coordinates.
        boxes_t[:, :2] = np.maximum(boxes_t[:, :2], roi[:2])
        boxes_t[:, :2] -= roi[:2]
        boxes_t[:, 2:] = np.minimum(boxes_t[:, 2:], roi[2:])
        boxes_t[:, 2:] -= roi[:2]

        # Landmarks: shift into crop coordinates and clamp to the crop bounds.
        landms_t[:, :, :2] = landms_t[:, :, :2] - roi[:2]
        landms_t[:, :, :2] = np.maximum(landms_t[:, :, :2], np.array([0, 0]))
        landms_t[:, :, :2] = np.minimum(landms_t[:, :, :2], roi[2:] - roi[:2])
        landms_t = landms_t.reshape([-1, 10])


        # Drop degenerate boxes after rescaling to the training resolution.
        # NOTE(review): upstream intent appears to be a 16-px minimum face
        # size, but the threshold actually used is 0.0, so only zero-area
        # boxes are removed — confirm before "fixing".
        b_w_t = (boxes_t[:, 2] - boxes_t[:, 0] + 1) / w * img_dim
        b_h_t = (boxes_t[:, 3] - boxes_t[:, 1] + 1) / h * img_dim
        mask_b = np.minimum(b_w_t, b_h_t) > 0.0
        boxes_t = boxes_t[mask_b]
        labels_t = labels_t[mask_b]
        landms_t = landms_t[mask_b]

        if boxes_t.shape[0] == 0:
            continue

        pad_image_flag = False

        return image_t, boxes_t, labels_t, landms_t, pad_image_flag
    return image, boxes, labels, landm, pad_image_flag
|
78 |
+
|
79 |
+
|
80 |
+
def _distort(image):
    """Photometric augmentation: random brightness, contrast, saturation, hue.

    Each jitter is applied independently with probability 1/2.  The outer
    coin flip selects whether contrast is applied before or after the
    BGR -> HSV -> BGR round-trip, matching the classic SSD-style recipe.
    Operates on a copy; the caller's array is never modified.
    """

    def _scale_shift(img, alpha=1, beta=0):
        # Affine intensity transform, clamped to the valid 8-bit range,
        # written back into `img` in place.
        buf = img.astype(float) * alpha + beta
        buf[buf < 0] = 0
        buf[buf > 255] = 255
        img[:] = buf

    def _maybe_brightness(img):
        if random.randrange(2):
            _scale_shift(img, beta=random.uniform(-32, 32))

    def _maybe_contrast(img):
        if random.randrange(2):
            _scale_shift(img, alpha=random.uniform(0.5, 1.5))

    def _hsv_jitter(hsv):
        # Saturation scaling and hue rotation on an HSV image, in place.
        if random.randrange(2):
            _scale_shift(hsv[:, :, 1], alpha=random.uniform(0.5, 1.5))
        if random.randrange(2):
            shifted = hsv[:, :, 0].astype(int) + random.randint(-18, 18)
            shifted %= 180
            hsv[:, :, 0] = shifted

    image = image.copy()

    if random.randrange(2):
        # Contrast applied in BGR space, before the HSV round-trip.
        _maybe_brightness(image)
        _maybe_contrast(image)
        image = cv2.cvtColor(image, cv2.COLOR_BGR2HSV)
        _hsv_jitter(image)
        image = cv2.cvtColor(image, cv2.COLOR_HSV2BGR)
    else:
        # Contrast applied last, after the HSV round-trip.
        _maybe_brightness(image)
        image = cv2.cvtColor(image, cv2.COLOR_BGR2HSV)
        _hsv_jitter(image)
        image = cv2.cvtColor(image, cv2.COLOR_HSV2BGR)
        _maybe_contrast(image)

    return image
|
139 |
+
|
140 |
+
|
141 |
+
def _expand(image, boxes, fill, p):
|
142 |
+
if random.randrange(2):
|
143 |
+
return image, boxes
|
144 |
+
|
145 |
+
height, width, depth = image.shape
|
146 |
+
|
147 |
+
scale = random.uniform(1, p)
|
148 |
+
w = int(scale * width)
|
149 |
+
h = int(scale * height)
|
150 |
+
|
151 |
+
left = random.randint(0, w - width)
|
152 |
+
top = random.randint(0, h - height)
|
153 |
+
|
154 |
+
boxes_t = boxes.copy()
|
155 |
+
boxes_t[:, :2] += (left, top)
|
156 |
+
boxes_t[:, 2:] += (left, top)
|
157 |
+
expand_image = np.empty(
|
158 |
+
(h, w, depth),
|
159 |
+
dtype=image.dtype)
|
160 |
+
expand_image[:, :] = fill
|
161 |
+
expand_image[top:top + height, left:left + width] = image
|
162 |
+
image = expand_image
|
163 |
+
|
164 |
+
return image, boxes_t
|
165 |
+
|
166 |
+
|
167 |
+
def _mirror(image, boxes, landms):
|
168 |
+
_, width, _ = image.shape
|
169 |
+
if random.randrange(2):
|
170 |
+
image = image[:, ::-1]
|
171 |
+
boxes = boxes.copy()
|
172 |
+
boxes[:, 0::2] = width - boxes[:, 2::-2]
|
173 |
+
|
174 |
+
# landm
|
175 |
+
landms = landms.copy()
|
176 |
+
landms = landms.reshape([-1, 5, 2])
|
177 |
+
landms[:, :, 0] = width - landms[:, :, 0]
|
178 |
+
tmp = landms[:, 1, :].copy()
|
179 |
+
landms[:, 1, :] = landms[:, 0, :]
|
180 |
+
landms[:, 0, :] = tmp
|
181 |
+
tmp1 = landms[:, 4, :].copy()
|
182 |
+
landms[:, 4, :] = landms[:, 3, :]
|
183 |
+
landms[:, 3, :] = tmp1
|
184 |
+
landms = landms.reshape([-1, 10])
|
185 |
+
|
186 |
+
return image, boxes, landms
|
187 |
+
|
188 |
+
|
189 |
+
def _pad_to_square(image, rgb_mean, pad_image_flag):
|
190 |
+
if not pad_image_flag:
|
191 |
+
return image
|
192 |
+
height, width, _ = image.shape
|
193 |
+
long_side = max(width, height)
|
194 |
+
image_t = np.empty((long_side, long_side, 3), dtype=image.dtype)
|
195 |
+
image_t[:, :] = rgb_mean
|
196 |
+
image_t[0:0 + height, 0:0 + width] = image
|
197 |
+
return image_t
|
198 |
+
|
199 |
+
|
200 |
+
def _resize_subtract_mean(image, insize, rgb_mean):
    """Resize to (insize, insize) with a randomly chosen interpolation,
    subtract the per-channel mean, and convert HWC -> CHW float32."""
    methods = [cv2.INTER_LINEAR, cv2.INTER_CUBIC, cv2.INTER_AREA,
               cv2.INTER_NEAREST, cv2.INTER_LANCZOS4]
    # Random interpolation acts as a mild extra augmentation.
    chosen = methods[random.randrange(5)]
    resized = cv2.resize(image, (insize, insize), interpolation=chosen)
    resized = resized.astype(np.float32)
    resized -= rgb_mean
    return resized.transpose(2, 0, 1)
|
207 |
+
|
208 |
+
|
209 |
+
class preproc(object):
    """Training-time preprocessing pipeline for face-detection targets.

    Applies, in order: random square crop, photometric distortion, padding to
    square, random horizontal flip, resize + mean subtraction.  Box and
    landmark coordinates are normalised to [0, 1] relative to the padded
    (pre-resize) image size.
    """

    def __init__(self, img_dim, rgb_means):
        # img_dim: square training resolution; rgb_means: per-channel mean.
        self.img_dim = img_dim
        self.rgb_means = rgb_means

    def __call__(self, image, targets):
        assert targets.shape[0] > 0, "this image does not have gt"

        # Target rows are [x1 y1 x2 y2 | 10 landmark coords | label].
        gt_boxes = targets[:, :4].copy()
        gt_labels = targets[:, -1].copy()
        gt_landms = targets[:, 4:-1].copy()

        out_img, out_boxes, out_labels, out_landms, need_pad = _crop(
            image, gt_boxes, gt_labels, gt_landms, self.img_dim)
        out_img = _distort(out_img)
        out_img = _pad_to_square(out_img, self.rgb_means, need_pad)
        out_img, out_boxes, out_landms = _mirror(out_img, out_boxes, out_landms)

        # Normalise coordinates by the padded size *before* the final resize.
        pad_h, pad_w, _ = out_img.shape
        out_img = _resize_subtract_mean(out_img, self.img_dim, self.rgb_means)
        out_boxes[:, 0::2] /= pad_w
        out_boxes[:, 1::2] /= pad_h
        out_landms[:, 0::2] /= pad_w
        out_landms[:, 1::2] /= pad_h

        merged = np.hstack((out_boxes, out_landms, np.expand_dims(out_labels, 1)))
        return out_img, merged
|
data/wider_face.py
ADDED
@@ -0,0 +1,101 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import os
|
2 |
+
import os.path
|
3 |
+
import sys
|
4 |
+
import torch
|
5 |
+
import torch.utils.data as data
|
6 |
+
import cv2
|
7 |
+
import numpy as np
|
8 |
+
|
9 |
+
class WiderFaceDetection(data.Dataset):
    """WIDER FACE detection dataset.

    Parses a RetinaFace-style ``label.txt`` in which image paths appear on
    lines starting with ``# `` and each subsequent non-comment line holds one
    face annotation (bbox ``x y w h`` followed by landmark fields).

    Args:
        txt_path: path to ``label.txt``; image paths are resolved against the
            sibling ``images/`` directory.
        preproc: optional callable applied as ``preproc(img, target)``.
    """

    def __init__(self, txt_path, preproc=None):
        self.preproc = preproc
        self.imgs_path = []
        self.words = []  # per-image list of raw annotation rows (lists of floats)
        labels = []
        is_first = True
        # Context manager guarantees the annotation file is closed
        # (the previous implementation leaked the open file handle).
        with open(txt_path, 'r') as f:
            for line in f:
                line = line.rstrip()
                if line.startswith('#'):
                    if is_first:
                        is_first = False
                    else:
                        # Flush annotations collected for the previous image.
                        self.words.append(labels.copy())
                        labels.clear()
                    path = txt_path.replace('label.txt', 'images/') + line[2:]
                    self.imgs_path.append(path)
                else:
                    labels.append([float(x) for x in line.split(' ')])
        # Flush the annotations of the last image.
        self.words.append(labels)

    def __len__(self):
        return len(self.imgs_path)

    def __getitem__(self, index):
        """Return ``(image, target)`` where target is an (N, 15) array of
        ``[x1 y1 x2 y2, 10 landmark coords, visibility-label]`` rows."""
        img = cv2.imread(self.imgs_path[index])
        # Also serves as an early failure point if the image failed to load.
        height, width, _ = img.shape

        labels = self.words[index]
        annotations = np.zeros((0, 15))
        if len(labels) == 0:
            return annotations
        for idx, label in enumerate(labels):
            annotation = np.zeros((1, 15))
            # bbox stored as x, y, w, h -> convert to corner format
            annotation[0, 0] = label[0]  # x1
            annotation[0, 1] = label[1]  # y1
            annotation[0, 2] = label[0] + label[2]  # x2
            annotation[0, 3] = label[1] + label[3]  # y2

            # Five facial landmarks as (x, y) pairs; every third raw field
            # (indices 6, 9, 12, 15) is skipped.
            annotation[0, 4] = label[4]    # l0_x
            annotation[0, 5] = label[5]    # l0_y
            annotation[0, 6] = label[7]    # l1_x
            annotation[0, 7] = label[8]    # l1_y
            annotation[0, 8] = label[10]   # l2_x
            annotation[0, 9] = label[11]   # l2_y
            annotation[0, 10] = label[13]  # l3_x
            annotation[0, 11] = label[14]  # l3_y
            annotation[0, 12] = label[16]  # l4_x
            annotation[0, 13] = label[17]  # l4_y
            # Last column: -1 when landmarks are absent (negative sentinel), else 1.
            if (annotation[0, 4] < 0):
                annotation[0, 14] = -1
            else:
                annotation[0, 14] = 1

            annotations = np.append(annotations, annotation, axis=0)
        target = np.array(annotations)
        if self.preproc is not None:
            img, target = self.preproc(img, target)

        return torch.from_numpy(img), target
|
78 |
+
|
79 |
+
def detection_collate(batch):
    """Custom collate fn for dealing with batches of images that have a different
    number of associated object annotations (bounding boxes).

    Arguments:
        batch: (tuple) A tuple of tensor images and lists of annotations

    Return:
        A tuple containing:
            1) (tensor) batch of images stacked on their 0 dim
            2) (list of tensors) annotations for a given image are stacked on 0 dim
    """
    images = []
    annotations = []
    for sample in batch:
        for element in sample:
            # Tensors are images; ndarrays are per-image annotation matrices.
            if torch.is_tensor(element):
                images.append(element)
            elif isinstance(element, np.ndarray):
                annotations.append(torch.from_numpy(element).float())

    return (torch.stack(images, 0), annotations)
|
dataset/__init__.py
ADDED
@@ -0,0 +1,17 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from .abstract_dataset import AbstractDataset
|
2 |
+
from .faceforensics import FaceForensics
|
3 |
+
from .wild_deepfake import WildDeepfake
|
4 |
+
from .celeb_df import CelebDF
|
5 |
+
from .dfdc import DFDC
|
6 |
+
|
7 |
+
# Registry mapping config names to dataset classes.
LOADERS = {
    "FaceForensics": FaceForensics,
    "WildDeepfake": WildDeepfake,
    "CelebDF": CelebDF,
    "DFDC": DFDC,
}


def load_dataset(name="FaceForensics"):
    """Return the dataset class registered under ``name``.

    Args:
        name: one of the keys of ``LOADERS``.

    Returns:
        The dataset class (not an instance).

    Raises:
        ValueError: if ``name`` is not a registered dataset.
    """
    print(f"Loading dataset: '{name}'...")
    try:
        return LOADERS[name]
    except KeyError:
        # fail with an actionable message instead of a bare KeyError
        raise ValueError(
            f"Unknown dataset '{name}'. Available: {sorted(LOADERS)}") from None
|
dataset/abstract_dataset.py
ADDED
@@ -0,0 +1,41 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import cv2
|
2 |
+
import torch
|
3 |
+
import numpy as np
|
4 |
+
from torchvision.datasets import VisionDataset
|
5 |
+
import albumentations
|
6 |
+
from albumentations import Compose
|
7 |
+
from albumentations.pytorch.transforms import ToTensorV2
|
8 |
+
|
9 |
+
|
10 |
+
class AbstractDataset(VisionDataset):
    """Common base for the deepfake datasets.

    Subclasses populate ``self.images`` (paths) and ``self.targets`` (labels);
    this base class builds the albumentations transform pipeline from the
    config and decodes images lazily via :meth:`load_item`.
    """

    def __init__(self, cfg, seed=2022, transforms=None, transform=None, target_transform=None):
        super(AbstractDataset, self).__init__(cfg['root'], transforms=transforms,
                                              transform=transform, target_transform=target_transform)
        # fix the NumPy RNG so subsampling is reproducible across runs
        np.random.seed(seed)

        self.images = list()
        self.targets = list()
        self.split = cfg['split']
        if self.transforms is None:
            # instantiate each configured augmentation by name, then append
            # the tensor conversion as the final step
            pipeline = [
                getattr(albumentations, spec['name'])(**spec['params'])
                for spec in cfg['transforms']
            ]
            pipeline.append(ToTensorV2())
            self.transforms = Compose(pipeline)

    def __len__(self):
        return len(self.images)

    def __getitem__(self, index):
        # return (path, target); decoding is deferred to load_item
        return self.images[index], self.targets[index]

    def load_item(self, items):
        """Read the given image paths, convert BGR->RGB, transform, and stack."""
        batch = []
        for path in items:
            bgr = cv2.imread(path)
            rgb = cv2.cvtColor(bgr, cv2.COLOR_BGR2RGB)
            batch.append(self.transforms(image=rgb)['image'])
        return torch.stack(batch, dim=0)
|
dataset/celeb_df.py
ADDED
@@ -0,0 +1,126 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import numpy as np
|
2 |
+
from glob import glob
|
3 |
+
from os import listdir
|
4 |
+
from os.path import join
|
5 |
+
from dataset import AbstractDataset
|
6 |
+
|
7 |
+
SPLITS = ["train", "test"]
|
8 |
+
|
9 |
+
|
10 |
+
class CelebDF(AbstractDataset):
    """
    Celeb-DF v2 Dataset proposed in "Celeb-DF: A Large-scale Challenging Dataset for DeepFake Forensics".
    """

    def __init__(self, cfg, seed=2022, transforms=None, transform=None, target_transform=None):
        # pre-check
        if cfg['split'] not in SPLITS:
            raise ValueError(f"split should be one of {SPLITS}, but found {cfg['split']}.")
        super(CelebDF, self).__init__(cfg, seed, transforms, transform, target_transform)
        print(f"Loading data from 'Celeb-DF' of split '{cfg['split']}'"
              f"\nPlease wait patiently...")
        self.categories = ['original', 'fake']
        self.root = cfg['root']
        images_ids = self.__get_images_ids()
        test_ids = self.__get_test_ids()
        # train ids = all video ids minus the official test ids (per-source set difference)
        train_ids = [images_ids[0] - test_ids[0],
                     images_ids[1] - test_ids[1],
                     images_ids[2] - test_ids[2]]
        self.images, self.targets = self.__get_images(
            test_ids if cfg['split'] == "test" else train_ids, cfg['balance'])
        assert len(self.images) == len(self.targets), "The number of images and targets not consistent."
        print("Data from 'Celeb-DF' loaded.\n")
        print(f"Dataset contains {len(self.images)} images.\n")

    def __get_images_ids(self):
        # folder names under each source's images/ directory are the video ids
        youtube_real = listdir(join(self.root, 'YouTube-real', 'images'))
        celeb_real = listdir(join(self.root, 'Celeb-real', 'images'))
        celeb_fake = listdir(join(self.root, 'Celeb-synthesis', 'images'))
        return set(youtube_real), set(celeb_real), set(celeb_fake)

    def __get_test_ids(self):
        # parse the official test list; each line ends with "<source>/<video>.mp4"
        youtube_real = set()
        celeb_real = set()
        celeb_fake = set()
        with open(join(self.root, "List_of_testing_videos.txt"), "r", encoding="utf-8") as f:
            contents = f.readlines()
            for line in contents:
                name = line.split(" ")[-1]
                number = name.split("/")[-1].split(".")[0]
                if "YouTube-real" in name:
                    youtube_real.add(number)
                elif "Celeb-real" in name:
                    celeb_real.add(number)
                elif "Celeb-synthesis" in name:
                    celeb_fake.add(number)
                else:
                    raise ValueError("'List_of_testing_videos.txt' file corrupted.")
        return youtube_real, celeb_real, celeb_fake

    def __get_images(self, ids, balance=False):
        # collect frame paths for the selected video ids; label 0=real, 1=fake
        real = list()
        fake = list()
        # YouTube-real
        for _ in ids[0]:
            real.extend(glob(join(self.root, 'YouTube-real', 'images', _, '*.png')))
        # Celeb-real
        for _ in ids[1]:
            real.extend(glob(join(self.root, 'Celeb-real', 'images', _, '*.png')))
        # Celeb-synthesis
        for _ in ids[2]:
            fake.extend(glob(join(self.root, 'Celeb-synthesis', 'images', _, '*.png')))
        print(f"Real: {len(real)}, Fake: {len(fake)}")
        if balance:
            # subsample fakes to match the real count
            # NOTE(review): np.random.choice returns an ndarray (unpacked fine
            # below) and raises if len(fake) < len(real) — confirm fakes always
            # outnumber reals in this dataset layout.
            fake = np.random.choice(fake, size=len(real), replace=False)
            print(f"After Balance | Real: {len(real)}, Fake: {len(fake)}")
        real_tgt = [0] * len(real)
        fake_tgt = [1] * len(fake)
        return [*real, *fake], [*real_tgt, *fake_tgt]
|
79 |
+
|
80 |
+
|
81 |
+
if __name__ == '__main__':
    # Manual smoke tests for the CelebDF dataset (not run on import).
    import yaml

    config_path = "../config/dataset/celeb_df.yml"
    with open(config_path) as config_file:
        config = yaml.load(config_file, Loader=yaml.FullLoader)
    config = config["train_cfg"]
    # config = config["test_cfg"]

    def run_dataset():
        # Iterate the first few (path, target) pairs directly.
        dataset = CelebDF(config)
        print(f"dataset: {len(dataset)}")
        for i, _ in enumerate(dataset):
            path, target = _
            print(f"path: {path}, target: {target}")
            if i >= 9:
                break

    def run_dataloader(display_samples=False):
        # Batch paths through a DataLoader and decode them via load_item.
        from torch.utils import data
        import matplotlib.pyplot as plt

        dataset = CelebDF(config)
        dataloader = data.DataLoader(dataset, batch_size=8, shuffle=True)
        print(f"dataset: {len(dataset)}")
        for i, _ in enumerate(dataloader):
            path, targets = _
            image = dataloader.dataset.load_item(path)
            print(f"image: {image.shape}, target: {targets}")
            if display_samples:
                plt.figure()
                img = image[0].permute([1, 2, 0]).numpy()
                plt.imshow(img)
                # plt.savefig("./img_" + str(i) + ".png")
                plt.show()
            if i >= 9:
                break

    ###########################
    # run the functions below #
    ###########################

    # run_dataset()
    run_dataloader(False)
|
dataset/dfdc.py
ADDED
@@ -0,0 +1,124 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import json
|
2 |
+
from glob import glob
|
3 |
+
from os.path import join
|
4 |
+
from dataset import AbstractDataset
|
5 |
+
|
6 |
+
SPLIT = ["train", "val", "test"]
|
7 |
+
LABEL_MAP = {"REAL": 0, "FAKE": 1}
|
8 |
+
|
9 |
+
|
10 |
+
class DFDC(AbstractDataset):
    """
    Deepfake Detection Challenge organized by Facebook
    """

    def __init__(self, cfg, seed=2022, transforms=None, transform=None, target_transform=None):
        # pre-check
        if cfg['split'] not in SPLIT:
            raise ValueError(f"split should be one of {SPLIT}, but found {cfg['split']}.")
        super(DFDC, self).__init__(cfg, seed, transforms, transform, target_transform)
        print(f"Loading data from 'DFDC' of split '{cfg['split']}'"
              f"\nPlease wait patiently...")
        self.categories = ['original', 'fake']
        self.root = cfg['root']
        # running counters, used only in the summary print below
        self.num_real = 0
        self.num_fake = 0
        # NOTE(review): SPLIT allows "val" but no branch handles it, so the
        # dataset stays empty for that split — confirm this is intended.
        if self.split == "test":
            self.__load_test_data()
        elif self.split == "train":
            self.__load_train_data()
        assert len(self.images) == len(self.targets), "Length of images and targets not the same!"
        print(f"Data from 'DFDC' loaded.")
        print(f"Real: {self.num_real}, Fake: {self.num_fake}.")
        print(f"Dataset contains {len(self.images)} images\n")

    def __load_test_data(self):
        # labels.csv rows look like "<name>.mp4,<label>" with 0=real, 1=fake
        label_path = join(self.root, "test", "labels.csv")
        with open(label_path, encoding="utf-8") as file:
            content = file.readlines()
        for _ in content:
            if ".mp4" in _:
                key = _.split(".")[0]
                label = _.split(",")[1].strip()
                label = int(label)
                imgs = glob(join(self.root, "test", "images", key, "*.png"))
                num = len(imgs)
                self.images.extend(imgs)
                self.targets.extend([label] * num)
                if label == 0:
                    self.num_real += num
                elif label == 1:
                    self.num_fake += num

    def __load_train_data(self):
        # each dfdc_train_part_* fold ships a one-line metadata.json
        # mapping "<video>.mp4" -> {"label": "REAL"|"FAKE", ...}
        train_folds = glob(join(self.root, "dfdc_train_part_*"))
        for fold in train_folds:
            fold_imgs = list()
            fold_tgts = list()
            metadata_path = join(fold, "metadata.json")
            try:
                with open(metadata_path, "r", encoding="utf-8") as file:
                    metadata = json.loads(file.readline())
                for k, v in metadata.items():
                    index = k.split(".")[0]
                    label = LABEL_MAP[v["label"]]
                    imgs = glob(join(fold, "images", index, "*.png"))
                    fold_imgs.extend(imgs)
                    fold_tgts.extend([label] * len(imgs))
                    if label == 0:
                        self.num_real += len(imgs)
                    elif label == 1:
                        self.num_fake += len(imgs)
                self.images.extend(fold_imgs)
                self.targets.extend(fold_tgts)
            except FileNotFoundError:
                # folds without metadata are skipped silently
                continue
|
76 |
+
|
77 |
+
|
78 |
+
if __name__ == '__main__':
    # Manual smoke tests for the DFDC dataset (not run on import).
    import yaml

    config_path = "../config/dataset/dfdc.yml"
    with open(config_path) as config_file:
        config = yaml.load(config_file, Loader=yaml.FullLoader)
    config = config["train_cfg"]
    # config = config["test_cfg"]


    def run_dataset():
        # Iterate the first few (path, target) pairs directly.
        dataset = DFDC(config)
        print(f"dataset: {len(dataset)}")
        for i, _ in enumerate(dataset):
            path, target = _
            print(f"path: {path}, target: {target}")
            if i >= 9:
                break


    def run_dataloader(display_samples=False):
        # Batch paths through a DataLoader and decode them via load_item.
        from torch.utils import data
        import matplotlib.pyplot as plt

        dataset = DFDC(config)
        dataloader = data.DataLoader(dataset, batch_size=8, shuffle=True)
        print(f"dataset: {len(dataset)}")
        for i, _ in enumerate(dataloader):
            path, targets = _
            image = dataloader.dataset.load_item(path)
            print(f"image: {image.shape}, target: {targets}")
            if display_samples:
                plt.figure()
                img = image[0].permute([1, 2, 0]).numpy()
                plt.imshow(img)
                # plt.savefig("./img_" + str(i) + ".png")
                plt.show()
            if i >= 9:
                break


    ###########################
    # run the functions below #
    ###########################

    # run_dataset()
    run_dataloader(False)
|
dataset/faceforensics.py
ADDED
@@ -0,0 +1,107 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import torch
|
2 |
+
import numpy as np
|
3 |
+
from os.path import join
|
4 |
+
from dataset import AbstractDataset
|
5 |
+
|
6 |
+
METHOD = ['all', 'Deepfakes', 'Face2Face', 'FaceSwap', 'NeuralTextures']
|
7 |
+
SPLIT = ['train', 'val', 'test']
|
8 |
+
COMP2NAME = {'c0': 'raw', 'c23': 'c23', 'c40': 'c40'}
|
9 |
+
SOURCE_MAP = {'youtube': 2, 'Deepfakes': 3, 'Face2Face': 4, 'FaceSwap': 5, 'NeuralTextures': 6}
|
10 |
+
|
11 |
+
|
12 |
+
class FaceForensics(AbstractDataset):
    """
    FaceForensics++ Dataset proposed in "FaceForensics++: Learning to Detect Manipulated Facial Images"
    """

    def __init__(self, cfg, seed=2022, transforms=None, transform=None, target_transform=None):
        # pre-check
        if cfg['split'] not in SPLIT:
            raise ValueError(f"split should be one of {SPLIT}, "
                             f"but found {cfg['split']}.")
        if cfg['method'] not in METHOD:
            raise ValueError(f"method should be one of {METHOD}, "
                             f"but found {cfg['method']}.")
        if cfg['compression'] not in COMP2NAME.keys():
            raise ValueError(f"compression should be one of {COMP2NAME.keys()}, "
                             f"but found {cfg['compression']}.")
        super(FaceForensics, self).__init__(
            cfg, seed, transforms, transform, target_transform)
        print(f"Loading data from 'FF++ {cfg['method']}' of split '{cfg['split']}' "
              f"and compression '{cfg['compression']}'\nPlease wait patiently...")

        self.categories = ['original', 'fake']
        # load the path of dataset images
        # NOTE(review): the pickle index appears to hold (relative_path, label)
        # pairs, judging by the _[0]/_[1] accesses below — confirm against the
        # preprocessing script that generates it.
        indices = join(self.root, cfg['split'] + "_" + cfg['compression'] + ".pickle")
        indices = torch.load(indices)
        if cfg['method'] == "all":
            # full dataset
            self.images = [join(cfg['root'], _[0]) for _ in indices]
            self.targets = [_[1] for _ in indices]
        else:
            # specific manipulated method
            self.images = list()
            self.targets = list()
            nums = 0
            for _ in indices:
                if cfg['method'] in _[0]:
                    self.images.append(join(cfg['root'], _[0]))
                    self.targets.append(_[1])
            nums = len(self.targets)
            # sample an equal number of pristine frames as the real class
            ori = list()
            for _ in indices:
                if "original_sequences" in _[0]:
                    ori.append(join(cfg['root'], _[0]))
            choices = np.random.choice(ori, size=nums, replace=False)
            self.images.extend(choices)
            self.targets.extend([0] * nums)
        print("Data from 'FF++' loaded.\n")
        print(f"Dataset contains {len(self.images)} images.\n")
|
60 |
+
|
61 |
+
|
62 |
+
if __name__ == '__main__':
    # Manual smoke tests for the FaceForensics dataset (not run on import).
    import yaml

    config_path = "../config/dataset/faceforensics.yml"
    with open(config_path) as config_file:
        config = yaml.load(config_file, Loader=yaml.FullLoader)
    config = config["train_cfg"]
    # config = config["test_cfg"]

    def run_dataset():
        # Iterate the first few (path, target) pairs directly.
        dataset = FaceForensics(config)
        print(f"dataset: {len(dataset)}")
        for i, _ in enumerate(dataset):
            path, target = _
            print(f"path: {path}, target: {target}")
            if i >= 9:
                break

    def run_dataloader(display_samples=False):
        # Batch paths through a DataLoader and decode them via load_item.
        from torch.utils import data
        import matplotlib.pyplot as plt

        dataset = FaceForensics(config)
        dataloader = data.DataLoader(dataset, batch_size=8, shuffle=True)
        print(f"dataset: {len(dataset)}")
        for i, _ in enumerate(dataloader):
            path, targets = _
            image = dataloader.dataset.load_item(path)
            print(f"image: {image.shape}, target: {targets}")
            if display_samples:
                plt.figure()
                img = image[0].permute([1, 2, 0]).numpy()
                plt.imshow(img)
                # plt.savefig("./img_" + str(i) + ".png")
                plt.show()
            if i >= 9:
                break

    ###########################
    # run the functions below #
    ###########################

    # run_dataset()
    run_dataloader(False)
|
dataset/wild_deepfake.py
ADDED
@@ -0,0 +1,100 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import torch
|
2 |
+
import numpy as np
|
3 |
+
from os.path import join
|
4 |
+
from dataset import AbstractDataset
|
5 |
+
|
6 |
+
SPLITS = ["train", "test"]
|
7 |
+
|
8 |
+
|
9 |
+
class WildDeepfake(AbstractDataset):
    """
    Wild Deepfake Dataset proposed in "WildDeepfake: A Challenging Real-World Dataset for Deepfake Detection"
    """

    def __init__(self, cfg, seed=2022, transforms=None, transform=None, target_transform=None):
        # pre-check
        if cfg['split'] not in SPLITS:
            raise ValueError(f"split should be one of {SPLITS}, but found {cfg['split']}.")
        super(WildDeepfake, self).__init__(cfg, seed, transforms, transform, target_transform)
        print(f"Loading data from 'WildDeepfake' of split '{cfg['split']}'"
              f"\nPlease wait patiently...")
        self.categories = ['original', 'fake']
        self.root = cfg['root']
        # optional caps on the number of images sampled per split
        self.num_train = cfg.get('num_image_train', None)
        self.num_test = cfg.get('num_image_test', None)
        self.images, self.targets = self.__get_images()
        print(f"Data from 'WildDeepfake' loaded.")
        print(f"Dataset contains {len(self.images)} images.\n")

    def __get_images(self):
        """Load the real/fake path lists, optionally subsample, and build targets.

        Returns:
            (images, targets) lists; targets are scalar tensors 0 (real) / 1 (fake).
        """
        if self.split == 'train':
            num = self.num_train
        elif self.split == 'test':
            num = self.num_test
        else:
            num = None
        real_images = torch.load(join(self.root, self.split, "real.pickle"))
        if num is not None:
            # BUGFIX: np.random.choice returns an ndarray; wrap in list() so the
            # '+' concatenation below joins the sequences instead of attempting
            # element-wise ndarray ops (which would broadcast-error or mangle).
            real_images = list(np.random.choice(real_images, num // 3, replace=False))
        real_tgts = [torch.tensor(0)] * len(real_images)
        print(f"real: {len(real_tgts)}")
        fake_images = torch.load(join(self.root, self.split, "fake.pickle"))
        if num is not None:
            fake_images = list(np.random.choice(fake_images, num - num // 3, replace=False))
        fake_tgts = [torch.tensor(1)] * len(fake_images)
        print(f"fake: {len(fake_tgts)}")
        return real_images + fake_images, real_tgts + fake_tgts

    def __getitem__(self, index):
        # images are stored relative to <root>/<split>/
        path = join(self.root, self.split, self.images[index])
        tgt = self.targets[index]
        return path, tgt
|
52 |
+
|
53 |
+
|
54 |
+
if __name__ == '__main__':
    # Manual smoke tests for the WildDeepfake dataset (not run on import).
    import yaml

    config_path = "../config/dataset/wilddeepfake.yml"
    with open(config_path) as config_file:
        config = yaml.load(config_file, Loader=yaml.FullLoader)
    config = config["train_cfg"]
    # config = config["test_cfg"]


    def run_dataset():
        # Iterate the first few (path, target) pairs directly.
        dataset = WildDeepfake(config)
        print(f"dataset: {len(dataset)}")
        for i, _ in enumerate(dataset):
            path, target = _
            print(f"path: {path}, target: {target}")
            if i >= 9:
                break


    def run_dataloader(display_samples=False):
        # Batch paths through a DataLoader and decode them via load_item.
        from torch.utils import data
        import matplotlib.pyplot as plt

        dataset = WildDeepfake(config)
        dataloader = data.DataLoader(dataset, batch_size=8, shuffle=True)
        print(f"dataset: {len(dataset)}")
        for i, _ in enumerate(dataloader):
            path, targets = _
            image = dataloader.dataset.load_item(path)
            print(f"image: {image.shape}, target: {targets}")
            if display_samples:
                plt.figure()
                img = image[0].permute([1, 2, 0]).numpy()
                plt.imshow(img)
                # plt.savefig("./img_" + str(i) + ".png")
                plt.show()
            if i >= 9:
                break


    ###########################
    # run the functions below #
    ###########################

    # run_dataset()
    run_dataloader(False)
|
extract_video.py
ADDED
@@ -0,0 +1,233 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import os
|
2 |
+
from os.path import join
|
3 |
+
import argparse
|
4 |
+
import numpy as np
|
5 |
+
import cv2
|
6 |
+
import torch
|
7 |
+
from tqdm import tqdm
|
8 |
+
|
9 |
+
from data import cfg_mnet, cfg_re50
|
10 |
+
from layers.functions.prior_box import PriorBox
|
11 |
+
from utils.nms.py_cpu_nms import py_cpu_nms
|
12 |
+
from models.retinaface import RetinaFace
|
13 |
+
from utils.box_utils import decode
|
14 |
+
|
15 |
+
np.random.seed(0)
|
16 |
+
|
17 |
+
|
18 |
+
def check_keys(model, pretrained_state_dict):
    """Report how a checkpoint's keys overlap with the model's state dict.

    Prints counts of missing / unused / shared keys and asserts that at least
    one checkpoint key matches the model.
    """
    ckpt_keys = set(pretrained_state_dict.keys())
    model_keys = set(model.state_dict().keys())
    shared_keys = model_keys & ckpt_keys
    print('Missing keys:{}'.format(len(model_keys - ckpt_keys)))
    print('Unused checkpoint keys:{}'.format(len(ckpt_keys - model_keys)))
    print('Used keys:{}'.format(len(shared_keys)))
    assert len(shared_keys) > 0, 'load NONE from pretrained checkpoint'
    return True
|
29 |
+
|
30 |
+
|
31 |
+
def remove_prefix(state_dict, prefix):
    ''' Old style model is stored with all names of parameters sharing common prefix 'module.' '''
    print('remove prefix \'{}\''.format(prefix))
    stripped = {}
    for key, value in state_dict.items():
        # drop the first occurrence of the prefix; leave other keys untouched
        new_key = key.split(prefix, 1)[-1] if key.startswith(prefix) else key
        stripped[new_key] = value
    return stripped
|
38 |
+
|
39 |
+
|
40 |
+
def load_model(model, pretrained_path, load_to_cpu):
    """Load RetinaFace weights from `pretrained_path` into `model`.

    Strips a leading 'module.' (DataParallel) prefix from checkpoint keys and
    loads non-strictly. Places the model on the module-level `device` global.

    NOTE(review): the call site passes `args.device` (a string) as
    `load_to_cpu`; any non-empty string is truthy, so the CPU map_location
    branch is always taken — confirm this is intended.
    """
    print('Loading pretrained model from {}'.format(pretrained_path))
    if load_to_cpu:
        pretrained_dict = torch.load(
            pretrained_path, map_location=lambda storage, loc: storage)
    else:
        pretrained_dict = torch.load(
            pretrained_path, map_location=lambda storage, loc: storage.cuda(device))
    if "state_dict" in pretrained_dict.keys():
        # trainer-style checkpoint: weights nested under 'state_dict'
        pretrained_dict = remove_prefix(
            pretrained_dict['state_dict'], 'module.')
    else:
        pretrained_dict = remove_prefix(pretrained_dict, 'module.')
    check_keys(model, pretrained_dict)
    model.load_state_dict(pretrained_dict, strict=False)
    model.to(device)
    return model
|
57 |
+
|
58 |
+
|
59 |
+
def detect(img_list, output_path, resize=1):
    """Run RetinaFace on a list of preprocessed frames and save one face crop per frame.

    Relies on module-level globals: `net` (the detector), `cfg`, `args`, `device`.
    Frames are expected as (H, W, 3) tensors with channel means already
    subtracted; crops are written to `output_path` as 0000.png, 0001.png, ...
    Frames with no surviving detection are skipped (numbering stays contiguous).
    """
    os.makedirs(output_path, exist_ok=True)
    im_height, im_width, _ = img_list[0].shape
    scale = torch.Tensor([im_width, im_height, im_width, im_height])
    img_x = torch.stack(img_list, dim=0).permute([0, 3, 1, 2])
    scale = scale.to(device)

    # batch size
    batch_size = args.bs
    # forward times
    f_times = img_x.shape[0] // batch_size
    if img_x.shape[0] % batch_size != 0:
        f_times += 1
    locs_list = list()
    confs_list = list()
    for _ in range(f_times):
        if _ != f_times - 1:
            batch_img_x = img_x[_ * batch_size:(_ + 1) * batch_size]
        else:
            batch_img_x = img_x[_ * batch_size:]  # last batch
        batch_img_x = batch_img_x.to(device).float()
        l, c, _ = net(batch_img_x)
        locs_list.append(l)
        confs_list.append(c)
    locs = torch.cat(locs_list, dim=0)
    confs = torch.cat(confs_list, dim=0)

    # anchors for this input resolution, shared across all frames
    priorbox = PriorBox(cfg, image_size=(im_height, im_width))
    priors = priorbox.forward()
    priors = priors.to(device)
    prior_data = priors.data

    img_cpu = img_x.permute([0, 2, 3, 1]).cpu().numpy()
    i = 0
    for img, loc, conf in zip(img_cpu, locs, confs):
        # decode regression output to absolute box coordinates
        boxes = decode(loc.data, prior_data, cfg['variance'])
        boxes = boxes * scale / resize
        boxes = boxes.cpu().numpy()
        scores = conf.data.cpu().numpy()[:, 1]

        # ignore low scores
        inds = np.where(scores > args.confidence_threshold)[0]
        boxes = boxes[inds]
        scores = scores[inds]

        # keep top-K before NMS
        order = scores.argsort()[::-1][:args.top_k]
        boxes = boxes[order]
        scores = scores[order]

        # do NMS
        dets = np.hstack((boxes, scores[:, np.newaxis])).astype(
            np.float32, copy=False)
        keep = py_cpu_nms(dets, args.nms_threshold)
        # keep = nms(dets, args.nms_threshold,force_cpu=args.cpu)
        dets = dets[keep, :]

        # keep top-K faster NMS
        dets = dets[:args.keep_top_k, :]

        if len(dets) == 0:
            continue
        # take the highest-scoring detection, enlarge it, and restore the
        # channel means that were subtracted during preprocessing
        det = list(map(int, dets[0]))
        x, y, size_bb_x, size_bb_y = get_boundingbox(det, img.shape[1], img.shape[0])
        cropped_img = img[y:y + size_bb_y, x:x + size_bb_x, :] + (104, 117, 123)
        cv2.imwrite(join(output_path, '{:04d}.png'.format(i)), cropped_img)
        i += 1
    pass
|
127 |
+
|
128 |
+
|
129 |
+
def extract_frames(data_path, interval=1):
    """Extract frames from an ``.mp4`` video, or load a single image.

    Frames stay in OpenCV's BGR channel order with the detector's channel
    means (104, 117, 123) subtracted. (The original code called
    ``cv2.cvtColor(image, cv2.COLOR_BGR2RGB)`` but discarded the result, so
    frames were always BGR; the dead call is removed here with no behavior
    change.)

    Args:
        data_path: path to an ``.mp4`` video or an image file.
        interval: keep every ``interval``-th frame.

    Returns:
        A list of (H, W, 3) tensors; at most ``args.max_frames`` frames,
        randomly subsampled when more were collected.
    """
    if data_path.split('.')[-1] == "mp4":
        reader = cv2.VideoCapture(data_path)
        frame_num = 0
        frames = list()

        while reader.isOpened():
            success, image = reader.read()
            if not success:
                break
            # subtract the RetinaFace BGR channel means
            image = torch.tensor(image) - torch.tensor([104, 117, 123])
            if frame_num % interval == 0:
                frames.append(image)
            frame_num += 1
            if len(frames) > args.max_frames:
                break
        reader.release()
        if len(frames) > args.max_frames:
            # randomly subsample down to the configured cap
            samples = np.random.choice(
                np.arange(0, len(frames)), size=args.max_frames, replace=False)
            return [frames[_] for _ in samples]
        return frames
    else:
        image = cv2.imread(data_path)
        image = torch.tensor(image) - torch.tensor([104, 117, 123])
        return [image]
|
158 |
+
|
159 |
+
|
160 |
+
def get_boundingbox(bbox, width, height, scale=1.8, minsize=None):
    """Enlarge a detection box around its center and clamp it to the image.

    Args:
        bbox: sequence (x1, y1, x2, y2) of the detected face box.
        width: image width in pixels.
        height: image height in pixels.
        scale: enlargement factor applied to the box size.
        minsize: optional lower bound for the enlarged box size.

    Returns:
        (x, y, size_bb_x, size_bb_y): top-left corner and clamped box sizes.
    """
    x1, y1, x2, y2 = bbox[0], bbox[1], bbox[2], bbox[3]
    size_bb_x = int((x2 - x1) * scale)
    size_bb_y = int((y2 - y1) * scale)
    if minsize:
        size_bb_x = max(size_bb_x, minsize)
        size_bb_y = max(size_bb_y, minsize)
    center_x = (x1 + x2) // 2
    center_y = (y1 + y2) // 2

    # shift the top-left corner back inside the image if it went negative
    x1 = max(int(center_x - size_bb_x // 2), 0)
    y1 = max(int(center_y - size_bb_y // 2), 0)
    # shrink the box if it would spill past the right/bottom edge
    size_bb_x = min(width - x1, size_bb_x)
    size_bb_y = min(height - y1, size_bb_y)
    return x1, y1, size_bb_x, size_bb_y
|
181 |
+
|
182 |
+
|
183 |
+
def extract_method_videos(data_path, interval):
    """Extract frames from one video and run face detection/cropping on them.

    Crops are written under ``<video_dir>/images/<video_name>/``. Any failure
    is logged to 'failure.txt' instead of aborting the run.
    """
    video = data_path.split('/')[-1]
    result_path = '/'.join(data_path.split('/')[:-1])
    images_path = join(result_path, 'images')

    image_folder = video.split('.')[0]
    try:
        print(data_path)
        image_list = extract_frames(data_path, interval)
        detect(image_list, join(images_path, image_folder))
    except Exception as ex:
        # use a context manager so the log file is always closed, even if
        # the write itself raises
        with open("failure.txt", "a", encoding="utf-8") as f:
            f.writelines(image_folder +
                         f" Exception for {image_folder}: {ex}\n")
|
198 |
+
|
199 |
+
|
200 |
+
if __name__ == '__main__':
    # CLI entry point: load the RetinaFace detector and process one video/image.
    p = argparse.ArgumentParser(
        formatter_class=argparse.ArgumentDefaultsHelpFormatter
    )
    p.add_argument('--data_path', '-p', type=str, help='path to the data')
    p.add_argument('--confidence_threshold', default=0.05,
                   type=float, help='confidence threshold')
    p.add_argument('--top_k', default=5, type=int, help='top_k')
    p.add_argument('--nms_threshold', default=0.4,
                   type=float, help='nms threshold')
    p.add_argument('--keep_top_k', default=1, type=int, help='keep_top_k')
    p.add_argument('--bs', default=32, type=int, help='batch size')
    p.add_argument('--frame_interval', '-fi', default=1, type=int, help='frame interval')
    p.add_argument('--device', "-d", default="cuda:0", type=str, help='device')
    p.add_argument('--max_frames', default=100, type=int, help='maximum frames per video')

    args = p.parse_args()

    # inference only: no gradients needed anywhere in this script
    torch.set_grad_enabled(False)
    # use resnet-50
    cfg = cfg_re50
    pretrained_weights = './weights/Resnet50_Final.pth'

    torch.backends.cudnn.benchmark = True
    device = torch.device(args.device)
    print(device)

    # net and model
    # NOTE(review): args.device (a string) is passed as load_model's
    # `load_to_cpu` flag; being truthy it always selects the CPU
    # map_location branch — confirm this is intended.
    net = RetinaFace(cfg=cfg, phase='test')
    net = load_model(net, pretrained_weights, args.device)
    net.eval()
    print('Finished loading model!')

    extract_method_videos(args.data_path, args.frame_interval)
|
inference.py
ADDED
@@ -0,0 +1,142 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import cv2
|
2 |
+
import torch
|
3 |
+
import random
|
4 |
+
import argparse
|
5 |
+
from glob import glob
|
6 |
+
from os.path import join
|
7 |
+
from model.network import Recce
|
8 |
+
from model.common import freeze_weights
|
9 |
+
from albumentations import Compose, Normalize, Resize
|
10 |
+
from albumentations.pytorch.transforms import ToTensorV2
|
11 |
+
|
12 |
+
import os
|
13 |
+
|
14 |
+
# Workaround for "duplicate OpenMP runtime" aborts when cv2 and torch each ship
# their own libiomp — presumably needed on Windows/macOS; harmless elsewhere.
os.environ['KMP_DUPLICATE_LIB_OK']='True'

# fix random seed (CPU and all CUDA devices) so repeated runs are reproducible
seed = 0
random.seed(seed)
torch.manual_seed(seed)
torch.cuda.manual_seed(seed)
torch.cuda.manual_seed_all(seed)

# Command-line interface: exactly one of --weight/--bin selects the checkpoint,
# and exactly one of --image/--image_folder selects the input (enforced in main()
# and prepare_data()).
parser = argparse.ArgumentParser(description="This code helps you use a trained model to "
                                             "do inference.")
parser.add_argument("--weight", "-w",
                    type=str,
                    default=None,
                    help="Specify the path to the model weight (the state dict file). "
                         "Do not use this argument when '--bin' is set.")
parser.add_argument("--bin", "-b",
                    type=str,
                    default=None,
                    help="Specify the path to the model bin which ends up with '.bin' "
                         "(which is generated by the trainer of this project). "
                         "Do not use this argument when '--weight' is set.")
parser.add_argument("--image", "-i",
                    type=str,
                    default=None,
                    help="Specify the path to the input image. "
                         "Do not use this argument when '--image_folder' is set.")
parser.add_argument("--image_folder", "-f",
                    type=str,
                    default=None,
                    help="Specify the directory to evaluate all the images. "
                         "Do not use this argument when '--image' is set.")

parser.add_argument('--device', '-d', type=str,
                    default="cpu",
                    help="Specify the device to load the model. Default: 'cpu'.")
parser.add_argument('--image_size', '-s', type=int,
                    default=299,
                    help="Specify the spatial size of the input image(s). Default: 299.")
parser.add_argument('--visualize', '-v', action="store_true",
                    default=False, help='Visualize images.')
|
55 |
+
|
56 |
+
|
57 |
+
def preprocess(file_path):
    """Load an image file and convert it to a normalized model-input tensor.

    The image is read with OpenCV (BGR), converted to RGB, resized to
    ``args.image_size`` x ``args.image_size``, normalized to roughly [-1, 1]
    (mean/std 0.5 per channel), and returned with a leading batch dimension.

    Args:
        file_path: path to a readable image file.

    Returns:
        A float tensor of shape (1, 3, args.image_size, args.image_size).

    Raises:
        FileNotFoundError: if OpenCV cannot read the file.
    """
    img = cv2.imread(file_path)
    if img is None:
        # cv2.imread signals failure by returning None instead of raising,
        # which otherwise surfaces as a cryptic cvtColor error — fail loudly here.
        raise FileNotFoundError(f"Cannot read image: {file_path}")
    img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
    compose = Compose([Resize(height=args.image_size, width=args.image_size),
                       Normalize(mean=[0.5] * 3, std=[0.5] * 3),
                       ToTensorV2()])
    img = compose(image=img)['image'].unsqueeze(0)
    return img
|
65 |
+
|
66 |
+
|
67 |
+
def prepare_data():
    """Collect input image paths and preprocessed tensors from the CLI arguments.

    Exactly one of ``args.image`` (single file) or ``args.image_folder``
    (directory scanned for *.jpg and *.png) must be set.

    Returns:
        (paths, images): parallel lists of file paths and preprocessed tensors.

    Raises:
        ValueError: if both or neither of the two input arguments are set.
    """
    paths = list()
    images = list()
    # check the console arguments
    if args.image and args.image_folder:
        raise ValueError("Only one of '--image' or '--image_folder' can be set.")
    elif args.image:
        images.append(preprocess(args.image))
        paths.append(args.image)
    elif args.image_folder:
        # BUG FIX: the previous code stripped everything after the last '.' from
        # the folder path, which turned dot-free directory names into '' and made
        # the glob search the filesystem root. Use the directory as given, and
        # sort for a deterministic processing order.
        image_paths = sorted(glob(join(args.image_folder, "*.jpg")) +
                             glob(join(args.image_folder, "*.png")))
        for image_path in image_paths:
            images.append(preprocess(image_path))
            paths.append(image_path)
    else:
        raise ValueError("Neither of '--image' nor '--image_folder' is set. Please specify either "
                         "one of these two arguments to load input image(s) properly.")
    return paths, images
|
87 |
+
|
88 |
+
|
89 |
+
def inference(model, images, paths, device):
    """Score every preprocessed image and print a per-image verdict.

    A sigmoid is applied to the model output, so each score is interpreted as
    the probability of the image being fake (threshold 0.5). When
    ``args.visualize`` is set, each image is also shown annotated with its score.

    Returns:
        The mean fake probability over all images.
    """
    probabilities = []
    for tensor, pt in zip(images, paths):
        logit = model(tensor.to(device))
        prediction = torch.sigmoid(logit).cpu()
        fake = bool(prediction >= 0.5)

        probabilities.append(prediction.item())

        print(f"path: {pt} \t\t| fake probability: {prediction.item():.4f} \t| "
              f"prediction: {'fake' if fake else 'real'}")
        if args.visualize:
            cvimg = cv2.imread(pt)
            cvimg = cv2.putText(cvimg, f'p: {prediction.item():.2f}, ' + f"{'fake' if fake else 'real'}",
                                (5, 50), cv2.FONT_HERSHEY_SIMPLEX, 0.5,
                                (0, 0, 255) if fake else (255, 0, 0), 2)
            cv2.imshow("image", cvimg)
            cv2.waitKey(0)
            cv2.destroyWindow("image")
    return sum(probabilities) / len(probabilities)
|
111 |
+
|
112 |
+
|
113 |
+
def main():
    """Build the Recce model, load the requested checkpoint, and run inference.

    Exactly one of ``args.weight`` (raw state dict) or ``args.bin`` (trainer
    checkpoint holding the state dict under the 'model' key) must be set.

    Raises:
        ValueError: if both or neither checkpoint argument is set.
    """
    print("Arguments:\n", args, end="\n\n")
    # set device
    device = torch.device(args.device)
    # load model — Recce is imported at the top of this file; the previous
    # eval("Recce") was a needless (and unsafe-looking) indirection.
    model = Recce(num_classes=1)
    # check the console arguments
    if args.weight and args.bin:
        raise ValueError("Only one of '--weight' or '--bin' can be set.")
    elif args.weight:
        weights = torch.load(args.weight, map_location="cpu")
    elif args.bin:
        weights = torch.load(args.bin, map_location="cpu")["model"]
    else:
        raise ValueError("Neither of '--weight' nor '--bin' is set. Please specify either "
                         "one of these two arguments to load model's weight properly.")
    model.load_state_dict(weights)
    model = model.to(device)
    # inference only: freeze parameters and switch off dropout/BN updates
    freeze_weights(model)
    model.eval()

    paths, images = prepare_data()
    print("Inference:")
    mean_pred = inference(model, images=images, paths=paths, device=device)
    print("Mean prediction:", mean_pred)
|
138 |
+
|
139 |
+
|
140 |
+
if __name__ == '__main__':
    # Parse CLI arguments into the module-level name that main() and the
    # helper functions above read.
    args = parser.parse_args()
    main()
|
layers/__init__.py
ADDED
@@ -0,0 +1,2 @@
|
|
|
|
|
|
|
1 |
+
from .functions import *
|
2 |
+
from .modules import *
|
layers/__pycache__/__init__.cpython-39.pyc
ADDED
Binary file (210 Bytes). View file
|
|
layers/functions/__pycache__/prior_box.cpython-39.pyc
ADDED
Binary file (1.86 kB). View file
|
|
layers/functions/prior_box.py
ADDED
@@ -0,0 +1,34 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import torch
|
2 |
+
from itertools import product as product
|
3 |
+
import numpy as np
|
4 |
+
from math import ceil
|
5 |
+
|
6 |
+
|
7 |
+
class PriorBox(object):
    """Generates SSD-style prior (anchor) boxes in normalized (cx, cy, w, h) form.

    One feature map is produced per stride in ``cfg['steps']``; every cell of a
    feature map emits one centered anchor per entry of the corresponding
    ``cfg['min_sizes']`` list.
    """

    def __init__(self, cfg, image_size=None, phase='train'):
        super(PriorBox, self).__init__()
        self.min_sizes = cfg['min_sizes']
        self.steps = cfg['steps']
        self.clip = cfg['clip']
        self.image_size = image_size
        # (rows, cols) of each feature map, one per stride
        self.feature_maps = [[ceil(self.image_size[0] / s), ceil(self.image_size[1] / s)]
                             for s in self.steps]
        self.name = "s"

    def forward(self):
        """Return all anchors as a float tensor of shape (num_anchors, 4)."""
        img_h, img_w = self.image_size[0], self.image_size[1]
        boxes = []
        for level, (rows, cols) in enumerate(self.feature_maps):
            step = self.steps[level]
            for i, j in product(range(rows), range(cols)):
                # cell center, normalized by the full image size
                cx = (j + 0.5) * step / img_w
                cy = (i + 0.5) * step / img_h
                for min_size in self.min_sizes[level]:
                    boxes.append([cx, cy, min_size / img_w, min_size / img_h])

        # back to torch land
        output = torch.Tensor(boxes).view(-1, 4)
        if self.clip:
            output.clamp_(max=1, min=0)
        return output
|
layers/modules/__init__.py
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
from .multibox_loss import MultiBoxLoss
|
2 |
+
|
3 |
+
__all__ = ['MultiBoxLoss']
|
layers/modules/__pycache__/__init__.cpython-39.pyc
ADDED
Binary file (245 Bytes). View file
|
|
layers/modules/__pycache__/multibox_loss.cpython-39.pyc
ADDED
Binary file (4.27 kB). View file
|
|
layers/modules/multibox_loss.py
ADDED
@@ -0,0 +1,125 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import torch
|
2 |
+
import torch.nn as nn
|
3 |
+
import torch.nn.functional as F
|
4 |
+
from torch.autograd import Variable
|
5 |
+
from utils.box_utils import match, log_sum_exp
|
6 |
+
from data import cfg_mnet
|
7 |
+
GPU = cfg_mnet['gpu_train']
|
8 |
+
|
9 |
+
class MultiBoxLoss(nn.Module):
    """SSD Weighted Loss Function
    Compute Targets:
        1) Produce Confidence Target Indices by matching ground truth boxes
           with (default) 'priorboxes' that have jaccard index > threshold parameter
           (default threshold: 0.5).
        2) Produce localization target by 'encoding' variance into offsets of ground
           truth boxes and their matched 'priorboxes'.
        3) Hard negative mining to filter the excessive number of negative examples
           that comes with using a large number of default bounding boxes.
           (default negative:positive ratio 3:1)
    Objective Loss:
        L(x,c,l,g) = (Lconf(x, c) + αLloc(x,l,g)) / N
        Where, Lconf is the CrossEntropy Loss and Lloc is the SmoothL1 Loss
        weighted by α which is set to 1 by cross val.
        Args:
            c: class confidences,
            l: predicted boxes,
            g: ground truth boxes
            N: number of matched default boxes
            See: https://arxiv.org/pdf/1512.02325.pdf for more details.
    """

    def __init__(self, num_classes, overlap_thresh, prior_for_matching, bkg_label, neg_mining, neg_pos, neg_overlap, encode_target):
        super(MultiBoxLoss, self).__init__()
        self.num_classes = num_classes
        self.threshold = overlap_thresh          # IoU threshold used by match()
        self.background_label = bkg_label
        self.encode_target = encode_target
        self.use_prior_for_matching = prior_for_matching
        self.do_neg_mining = neg_mining
        self.negpos_ratio = neg_pos              # negatives kept per positive
        self.neg_overlap = neg_overlap
        self.variance = [0.1, 0.2]               # box-encoding variances (center, size)

    def forward(self, predictions, priors, targets):
        """Multibox Loss
        Args:
            predictions (tuple): A tuple containing loc preds, conf preds,
            and prior boxes from SSD net.
                conf shape: torch.size(batch_size,num_priors,num_classes)
                loc shape: torch.size(batch_size,num_priors,4)
                priors shape: torch.size(num_priors,4)

            ground_truth (tensor): Ground truth boxes and labels for a batch,
                shape: [batch_size,num_objs,5] (last idx is the label).

        Returns:
            (loss_l, loss_c, loss_landm): localization, classification and
            landmark losses, each normalized by the number of matched priors.
        """

        loc_data, conf_data, landm_data = predictions
        priors = priors  # no-op rebinding, kept as-is
        num = loc_data.size(0)
        num_priors = (priors.size(0))

        # match priors (default boxes) and ground truth boxes
        loc_t = torch.Tensor(num, num_priors, 4)
        landm_t = torch.Tensor(num, num_priors, 10)
        conf_t = torch.LongTensor(num, num_priors)
        for idx in range(num):
            # targets layout per row: 4 box coords, 10 landmark coords, 1 label
            truths = targets[idx][:, :4].data
            labels = targets[idx][:, -1].data
            landms = targets[idx][:, 4:14].data
            defaults = priors.data
            # match() fills loc_t/conf_t/landm_t in place for sample idx
            match(self.threshold, truths, defaults, self.variance, labels, landms, loc_t, conf_t, landm_t, idx)
        if GPU:
            loc_t = loc_t.cuda()
            conf_t = conf_t.cuda()
            landm_t = landm_t.cuda()

        # NOTE(review): .cuda() here is unconditional even when GPU is False —
        # this path looks CUDA-only; confirm before running on CPU.
        zeros = torch.tensor(0).cuda()
        # landm Loss (Smooth L1)
        # Shape: [batch,num_priors,10]
        # only priors with a positive (non-background) label contribute landmarks
        pos1 = conf_t > zeros
        num_pos_landm = pos1.long().sum(1, keepdim=True)
        N1 = max(num_pos_landm.data.sum().float(), 1)  # avoid divide-by-zero
        pos_idx1 = pos1.unsqueeze(pos1.dim()).expand_as(landm_data)
        landm_p = landm_data[pos_idx1].view(-1, 10)
        landm_t = landm_t[pos_idx1].view(-1, 10)
        loss_landm = F.smooth_l1_loss(landm_p, landm_t, reduction='sum')


        # collapse all non-background labels to a single foreground class (1)
        pos = conf_t != zeros
        conf_t[pos] = 1

        # Localization Loss (Smooth L1)
        # Shape: [batch,num_priors,4]
        pos_idx = pos.unsqueeze(pos.dim()).expand_as(loc_data)
        loc_p = loc_data[pos_idx].view(-1, 4)
        loc_t = loc_t[pos_idx].view(-1, 4)
        loss_l = F.smooth_l1_loss(loc_p, loc_t, reduction='sum')

        # Compute max conf across batch for hard negative mining
        batch_conf = conf_data.view(-1, self.num_classes)
        loss_c = log_sum_exp(batch_conf) - batch_conf.gather(1, conf_t.view(-1, 1))

        # Hard Negative Mining
        loss_c[pos.view(-1, 1)] = 0  # filter out pos boxes for now
        loss_c = loss_c.view(num, -1)
        # double argsort ranks each prior by its confidence loss, descending
        _, loss_idx = loss_c.sort(1, descending=True)
        _, idx_rank = loss_idx.sort(1)
        num_pos = pos.long().sum(1, keepdim=True)
        num_neg = torch.clamp(self.negpos_ratio*num_pos, max=pos.size(1)-1)
        neg = idx_rank < num_neg.expand_as(idx_rank)

        # Confidence Loss Including Positive and Negative Examples
        pos_idx = pos.unsqueeze(2).expand_as(conf_data)
        neg_idx = neg.unsqueeze(2).expand_as(conf_data)
        conf_p = conf_data[(pos_idx+neg_idx).gt(0)].view(-1,self.num_classes)
        targets_weighted = conf_t[(pos+neg).gt(0)]
        loss_c = F.cross_entropy(conf_p, targets_weighted, reduction='sum')

        # Sum of losses: L(x,c,l,g) = (Lconf(x, c) + αLloc(x,l,g)) / N
        N = max(num_pos.data.sum().float(), 1)
        loss_l /= N
        loss_c /= N
        loss_landm /= N1

        return loss_l, loss_c, loss_landm
|
loss/__init__.py
ADDED
@@ -0,0 +1,12 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import torch.nn as nn
|
2 |
+
|
3 |
+
|
4 |
+
def get_loss(name="cross_entropy", device="cuda:0"):
    """Look up a criterion by name in LOSSES and move it to *device*."""
    criterion = LOSSES[name]
    print(f"Using loss: '{criterion}'")
    return criterion.to(device)


# Registry of supported criteria, keyed by the name accepted by get_loss().
LOSSES = {
    "binary_ce": nn.BCEWithLogitsLoss(),
    "cross_entropy": nn.CrossEntropyLoss()
}
|
model/__init__.py
ADDED
@@ -0,0 +1,12 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from .network import *
|
2 |
+
from .common import *
|
3 |
+
|
4 |
+
# Registry of available architectures, keyed by the name accepted by load_model().
MODELS = {
    "Recce": Recce
}


def load_model(name="Recce"):
    """Return the model class (not an instance) registered under *name*."""
    assert name in MODELS.keys(), f"Model name can only be one of {MODELS.keys()}."
    print(f"Using model: '{name}'")
    model_cls = MODELS[name]
    return model_cls
|
model/__pycache__/__init__.cpython-39.pyc
ADDED
Binary file (454 Bytes). View file
|
|
model/__pycache__/common.cpython-39.pyc
ADDED
Binary file (5.91 kB). View file
|
|
model/common.py
ADDED
@@ -0,0 +1,200 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import torch
|
2 |
+
import torch.nn as nn
|
3 |
+
import torch.nn.functional as F
|
4 |
+
|
5 |
+
|
6 |
+
def freeze_weights(module):
    """Disable gradient tracking for every parameter of *module*."""
    for p in module.parameters():
        p.requires_grad_(False)
|
9 |
+
|
10 |
+
|
11 |
+
def l1_regularize(module):
    """Sum of absolute values of all trainable 'weight' entries in module.reg_params.

    Only parameters whose key contains "weight" and that require gradients
    contribute; biases and frozen parameters are skipped.
    """
    total = 0.
    for name, param in module.reg_params.items():
        if "weight" in name and param.requires_grad:
            total = total + param.abs().sum()
    return total
|
17 |
+
|
18 |
+
|
19 |
+
class SeparableConv2d(nn.Module):
    """Depthwise convolution followed by a 1x1 pointwise convolution."""

    def __init__(self, in_channels, out_channels, kernel_size=1, stride=1, padding=0, dilation=1, bias=False):
        super(SeparableConv2d, self).__init__()
        # depthwise: one filter per input channel (groups == in_channels)
        self.conv1 = nn.Conv2d(in_channels, in_channels, kernel_size, stride, padding,
                               dilation, groups=in_channels, bias=bias)
        # pointwise: 1x1 convolution mixing channels
        self.pointwise = nn.Conv2d(in_channels, out_channels, 1, 1, 0, 1, 1, bias=bias)

    def forward(self, x):
        return self.pointwise(self.conv1(x))
|
31 |
+
|
32 |
+
|
33 |
+
class Block(nn.Module):
    """Xception-style residual block built from separable convolutions.

    The main path is `reps` repetitions of ReLU -> SeparableConv2d (-> BatchNorm
    when `with_bn`); `grow_first` decides whether the channel count grows at the
    first or the last repetition. A 1x1 projection shortcut is used whenever the
    channel count or stride changes, identity otherwise.
    """

    def __init__(self, in_channels, out_channels, reps, strides=1,
                 start_with_relu=True, grow_first=True, with_bn=True):
        super(Block, self).__init__()

        self.with_bn = with_bn

        # projection shortcut when the output shape differs from the input
        if out_channels != in_channels or strides != 1:
            self.skip = nn.Conv2d(in_channels, out_channels, 1, stride=strides, bias=False)
            if with_bn:
                self.skipbn = nn.BatchNorm2d(out_channels)
        else:
            self.skip = None

        layers = []
        for i in range(reps):
            if grow_first:
                inc = in_channels if i == 0 else out_channels
                outc = out_channels
            else:
                inc = in_channels
                outc = in_channels if i < (reps - 1) else out_channels
            layers.append(nn.ReLU(inplace=True))
            layers.append(SeparableConv2d(inc, outc, 3, stride=1, padding=1))
            if with_bn:
                layers.append(nn.BatchNorm2d(outc))

        if not start_with_relu:
            layers = layers[1:]
        else:
            # the first activation may receive the raw input, so it must not be in-place
            layers[0] = nn.ReLU(inplace=False)

        if strides != 1:
            layers.append(nn.MaxPool2d(3, strides, 1))
        self.rep = nn.Sequential(*layers)

    def forward(self, inp):
        out = self.rep(inp)

        if self.skip is not None:
            shortcut = self.skip(inp)
            if self.with_bn:
                shortcut = self.skipbn(shortcut)
        else:
            shortcut = inp

        out += shortcut
        return out
|
81 |
+
|
82 |
+
|
83 |
+
class GraphReasoning(nn.Module):
    """ Graph Reasoning Module for information aggregation.

    Aggregates two auxiliary feature maps (vertices b and c) into the main
    feature map (vertex a). b and c are unfolded into patches whose grid
    aligns with a's spatial positions via `spatial_ratio`, embedded, attention-
    reweighted against a's embedding, gated, and finally reprojected back to
    a's channel count.
    """

    def __init__(self, va_in, va_out, vb_in, vb_out, vc_in, vc_out, spatial_ratio, drop_rate):
        super(GraphReasoning, self).__init__()
        # spatial_ratio[0]/[1]: patch size (and stride) used to unfold b / c
        self.ratio = spatial_ratio
        # 1x1-conv embedding of vertex a
        self.va_embedding = nn.Sequential(
            nn.Conv2d(va_in, va_out, 1, bias=False),
            nn.ReLU(True),
            nn.Conv2d(va_out, va_out, 1, bias=False),
        )
        # sigmoid gates derived from a (applied as 1 - gate in forward)
        self.va_gated_b = nn.Sequential(
            nn.Conv2d(va_in, va_out, 1, bias=False),
            nn.Sigmoid()
        )
        self.va_gated_c = nn.Sequential(
            nn.Conv2d(va_in, va_out, 1, bias=False),
            nn.Sigmoid()
        )
        self.vb_embedding = nn.Sequential(
            nn.Linear(vb_in, vb_out, bias=False),
            nn.ReLU(True),
            nn.Linear(vb_out, vb_out, bias=False),
        )
        self.vc_embedding = nn.Sequential(
            nn.Linear(vc_in, vc_out, bias=False),
            nn.ReLU(True),
            nn.Linear(vc_out, vc_out, bias=False),
        )
        # non-overlapping patch extraction aligned with a's spatial grid
        self.unfold_b = nn.Unfold(kernel_size=spatial_ratio[0], stride=spatial_ratio[0])
        self.unfold_c = nn.Unfold(kernel_size=spatial_ratio[1], stride=spatial_ratio[1])
        # attention weights over the patch elements of b (resp. c) given a
        self.reweight_ab = nn.Sequential(
            nn.Linear(va_out + vb_out, 1, bias=False),
            nn.ReLU(True),
            nn.Softmax(dim=1)
        )
        self.reweight_ac = nn.Sequential(
            nn.Linear(va_out + vc_out, 1, bias=False),
            nn.ReLU(True),
            nn.Softmax(dim=1)
        )
        # map the concatenated [b-agg, c-agg, a] features back to va_in channels
        self.reproject = nn.Sequential(
            nn.Conv2d(va_out + vb_out + vc_out, va_in, kernel_size=1, bias=False),
            nn.ReLU(True),
            nn.Conv2d(va_in, va_in, kernel_size=1, bias=False),
            nn.Dropout(drop_rate) if drop_rate is not None else nn.Identity(),
        )

    def forward(self, vert_a, vert_b, vert_c):
        # embed a and flatten its spatial dims: (B, va_out, H*W)
        emb_vert_a = self.va_embedding(vert_a)
        emb_vert_a = emb_vert_a.reshape([emb_vert_a.shape[0], emb_vert_a.shape[1], -1])

        # inverted gates: large sigmoid response suppresses the aggregated signal
        gate_vert_b = 1 - self.va_gated_b(vert_a)
        gate_vert_b = gate_vert_b.reshape(*emb_vert_a.shape)
        gate_vert_c = 1 - self.va_gated_c(vert_a)
        gate_vert_c = gate_vert_c.reshape(*emb_vert_a.shape)

        # unfold b into (B, patch_elems, num_patches, C) and embed per element
        vert_b = self.unfold_b(vert_b).reshape(
            [vert_b.shape[0], vert_b.shape[1], self.ratio[0] * self.ratio[0], -1])
        vert_b = vert_b.permute([0, 2, 3, 1])
        emb_vert_b = self.vb_embedding(vert_b)

        vert_c = self.unfold_c(vert_c).reshape(
            [vert_c.shape[0], vert_c.shape[1], self.ratio[1] * self.ratio[1], -1])
        vert_c = vert_c.permute([0, 2, 3, 1])
        emb_vert_c = self.vc_embedding(vert_c)

        agg_vb = list()
        agg_vc = list()
        # iterate over a's spatial positions; j indexes the matching patch in b/c
        for j in range(emb_vert_a.shape[-1]):
            # ab propagating: attention-weighted sum of b's patch elements, gated
            emb_v_a = torch.stack([emb_vert_a[:, :, j]] * (self.ratio[0] ** 2), dim=1)
            emb_v_b = emb_vert_b[:, :, j, :]
            emb_v_ab = torch.cat([emb_v_a, emb_v_b], dim=-1)
            w = self.reweight_ab(emb_v_ab)
            agg_vb.append(torch.bmm(emb_v_b.transpose(1, 2), w).squeeze() * gate_vert_b[:, :, j])

            # ac propagating
            emb_v_a = torch.stack([emb_vert_a[:, :, j]] * (self.ratio[1] ** 2), dim=1)
            emb_v_c = emb_vert_c[:, :, j, :]
            emb_v_ac = torch.cat([emb_v_a, emb_v_c], dim=-1)
            w = self.reweight_ac(emb_v_ac)
            agg_vc.append(torch.bmm(emb_v_c.transpose(1, 2), w).squeeze() * gate_vert_c[:, :, j])

        # reassemble per-position aggregates, fuse with a, and reproject
        agg_vert_b = torch.stack(agg_vb, dim=-1)
        agg_vert_c = torch.stack(agg_vc, dim=-1)
        agg_vert_bc = torch.cat([agg_vert_b, agg_vert_c], dim=1)
        agg_vert_abc = torch.cat([agg_vert_bc, emb_vert_a], dim=1)
        agg_vert_abc = torch.sigmoid(agg_vert_abc)
        agg_vert_abc = agg_vert_abc.reshape(vert_a.shape[0], -1, vert_a.shape[2], vert_a.shape[3])
        return self.reproject(agg_vert_abc)
|
174 |
+
|
175 |
+
|
176 |
+
class GuidedAttention(nn.Module):
    """ Reconstruction Guided Attention.

    Uses the reconstruction error |x - pred_x| to produce a single-channel
    attention map that modulates a transformed copy of the embedding; the
    (dropout-regularized) embedding itself is added back as a residual.
    """

    def __init__(self, depth=728, drop_rate=0.2):
        super(GuidedAttention, self).__init__()
        self.depth = depth
        # maps the RGB reconstruction residual to a [0, 1] attention map
        self.gated = nn.Sequential(
            nn.Conv2d(3, 3, kernel_size=3, stride=1, padding=1, bias=False),
            nn.ReLU(True),
            nn.Conv2d(3, 1, 1, bias=False),
            nn.Sigmoid()
        )
        # channel-wise transform of the embedding before modulation
        self.h = nn.Sequential(
            nn.Conv2d(depth, depth, 1, 1, bias=False),
            nn.BatchNorm2d(depth),
            nn.ReLU(True),
        )
        self.dropout = nn.Dropout(drop_rate)

    def forward(self, x, pred_x, embedding):
        # absolute reconstruction error, resized to the embedding's spatial size
        recon_error = torch.abs(x - pred_x)
        recon_error = F.interpolate(recon_error, size=embedding.shape[-2:],
                                    mode='bilinear', align_corners=True)
        attention_map = self.gated(recon_error)
        guided = attention_map * self.h(embedding)
        return guided + self.dropout(embedding)
|
model/network/Recce.py
ADDED
@@ -0,0 +1,133 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from functools import partial
|
2 |
+
from timm.models import xception
|
3 |
+
from model.common import SeparableConv2d, Block
|
4 |
+
from model.common import GuidedAttention, GraphReasoning
|
5 |
+
|
6 |
+
import torch
|
7 |
+
import torch.nn as nn
|
8 |
+
import torch.nn.functional as F
|
9 |
+
|
10 |
+
# Backbone registry: feature width and constructor for each supported encoder.
# "features" is the channel count fed into the final classifier head.
encoder_params = {
    "xception": {
        "features": 2048,
        "init_op": partial(xception, pretrained=True)
    }
}
|
16 |
+
|
17 |
+
|
18 |
+
class Recce(nn.Module):
    """ End-to-End Reconstruction-Classification Learning for Face Forgery Detection

    Wraps a timm Xception encoder with a reconstruction decoder, a graph
    reasoning fusion module and reconstruction-guided attention. Auxiliary
    training signals (reconstructions and similarity matrices) are exposed via
    ``self.loss_inputs`` after each forward pass; the return value is the
    classification logit(s).
    """

    def __init__(self, num_classes, drop_rate=0.2):
        super(Recce, self).__init__()
        self.name = "xception"
        # populated on every forward(): {'recons': [...], 'contra': [...]}
        self.loss_inputs = dict()
        self.encoder = encoder_params[self.name]["init_op"]()
        self.global_pool = nn.AdaptiveAvgPool2d((1, 1))
        self.dropout = nn.Dropout(drop_rate)
        self.fc = nn.Linear(encoder_params[self.name]["features"], num_classes)

        self.attention = GuidedAttention(depth=728, drop_rate=drop_rate)
        self.reasoning = GraphReasoning(728, 256, 256, 256, 128, 256, [2, 4], drop_rate)

        # decoder: upsample 728-ch embedding back to a 3-channel reconstruction
        self.decoder1 = nn.Sequential(
            nn.UpsamplingNearest2d(scale_factor=2),
            SeparableConv2d(728, 256, 3, 1, 1, bias=False),
            nn.BatchNorm2d(256),
            nn.ReLU(inplace=True)
        )
        self.decoder2 = Block(256, 256, 3, 1)
        self.decoder3 = nn.Sequential(
            nn.UpsamplingNearest2d(scale_factor=2),
            SeparableConv2d(256, 128, 3, 1, 1, bias=False),
            nn.BatchNorm2d(128),
            nn.ReLU(inplace=True)
        )
        self.decoder4 = Block(128, 128, 3, 1)
        self.decoder5 = nn.Sequential(
            nn.UpsamplingNearest2d(scale_factor=2),
            SeparableConv2d(128, 64, 3, 1, 1, bias=False),
            nn.BatchNorm2d(64),
            nn.ReLU(inplace=True)
        )
        # final projection to RGB; Tanh keeps the reconstruction in [-1, 1]
        self.decoder6 = nn.Sequential(
            nn.Conv2d(64, 3, 1, 1, bias=False),
            nn.Tanh()
        )

    def norm_n_corr(self, x):
        """Return the L2-normalized pooled embedding and its pairwise similarity
        matrix, rescaled from [-1, 1] to [0, 1]."""
        norm_embed = F.normalize(self.global_pool(x), p=2, dim=1)
        corr = (torch.matmul(norm_embed.squeeze(), norm_embed.squeeze().T) + 1.) / 2.
        return norm_embed, corr

    @staticmethod
    def add_white_noise(tensor, mean=0., std=1e-6):
        """Add tiny Gaussian noise to a random half of the batch, clipped to [-1, 1]."""
        # per-sample coin flip: 1 -> noise applied, 0 -> untouched
        rand = torch.rand([tensor.shape[0], 1, 1, 1])
        rand = torch.where(rand > 0.5, 1., 0.).to(tensor.device)
        white_noise = torch.normal(mean, std, size=tensor.shape, device=tensor.device)
        noise_t = tensor + white_noise * rand
        noise_t = torch.clip(noise_t, -1., 1.)
        return noise_t

    def forward(self, x):
        # clear the loss inputs
        self.loss_inputs = dict(recons=[], contra=[])
        # noise augmentation only during training
        noise_x = self.add_white_noise(x) if self.training else x
        # timm xception stem + entry-flow blocks up to the 728-channel stage
        out = self.encoder.conv1(noise_x)
        out = self.encoder.bn1(out)
        out = self.encoder.act1(out)
        out = self.encoder.conv2(out)
        out = self.encoder.bn2(out)
        out = self.encoder.act2(out)
        out = self.encoder.block1(out)
        out = self.encoder.block2(out)
        out = self.encoder.block3(out)
        embedding = self.encoder.block4(out)

        norm_embed, corr = self.norm_n_corr(embedding)
        self.loss_inputs['contra'].append(corr)

        # reconstruction branch, collecting a similarity matrix at each scale
        out = self.dropout(embedding)
        out = self.decoder1(out)
        out_d2 = self.decoder2(out)

        norm_embed, corr = self.norm_n_corr(out_d2)
        self.loss_inputs['contra'].append(corr)

        out = self.decoder3(out_d2)
        out_d4 = self.decoder4(out)

        norm_embed, corr = self.norm_n_corr(out_d4)
        self.loss_inputs['contra'].append(corr)

        out = self.decoder5(out_d4)
        pred = self.decoder6(out)

        # reconstruction resized to the input resolution for the recons loss
        recons_x = F.interpolate(pred, size=x.shape[-2:], mode='bilinear', align_corners=True)
        self.loss_inputs['recons'].append(recons_x)

        # continue the encoder's middle flow
        embedding = self.encoder.block5(embedding)
        embedding = self.encoder.block6(embedding)
        embedding = self.encoder.block7(embedding)

        # fuse decoder features back into the encoder path (residual)
        fusion = self.reasoning(embedding, out_d2, out_d4) + embedding

        embedding = self.encoder.block8(fusion)
        # reconstruction-guided attention on the deep embedding
        img_att = self.attention(x, recons_x, embedding)

        embedding = self.encoder.block9(img_att)
        embedding = self.encoder.block10(embedding)
        embedding = self.encoder.block11(embedding)
        embedding = self.encoder.block12(embedding)

        embedding = self.encoder.conv3(embedding)
        embedding = self.encoder.bn3(embedding)
        embedding = self.encoder.act3(embedding)
        embedding = self.encoder.conv4(embedding)
        embedding = self.encoder.bn4(embedding)
        embedding = self.encoder.act4(embedding)

        # global average pool -> classifier head
        embedding = self.global_pool(embedding).squeeze()

        out = self.dropout(embedding)
        return self.fc(out)
|
model/network/__init__.py
ADDED
@@ -0,0 +1 @@
|
|
|
|
|
1 |
+
from .Recce import Recce
|
model/network/__pycache__/Recce.cpython-39.pyc
ADDED
Binary file (3.8 kB). View file
|
|
model/network/__pycache__/__init__.cpython-39.pyc
ADDED
Binary file (172 Bytes). View file
|
|
models/__init__.py
ADDED
File without changes
|
models/__pycache__/__init__.cpython-39.pyc
ADDED
Binary file (165 Bytes). View file
|
|
models/__pycache__/net.cpython-39.pyc
ADDED
Binary file (4.14 kB). View file
|
|
models/__pycache__/retinaface.cpython-39.pyc
ADDED
Binary file (5.5 kB). View file
|
|
models/net.py
ADDED
@@ -0,0 +1,137 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import time
|
2 |
+
import torch
|
3 |
+
import torch.nn as nn
|
4 |
+
import torchvision.models._utils as _utils
|
5 |
+
import torchvision.models as models
|
6 |
+
import torch.nn.functional as F
|
7 |
+
from torch.autograd import Variable
|
8 |
+
|
9 |
+
def conv_bn(inp, oup, stride=1, leaky=0):
    """3x3 convolution -> batch norm -> leaky ReLU."""
    return nn.Sequential(
        nn.Conv2d(inp, oup, kernel_size=3, stride=stride, padding=1, bias=False),
        nn.BatchNorm2d(oup),
        nn.LeakyReLU(negative_slope=leaky, inplace=True),
    )
|
15 |
+
|
16 |
+
def conv_bn_no_relu(inp, oup, stride):
    """3x3 convolution -> batch norm, with no activation."""
    return nn.Sequential(
        nn.Conv2d(inp, oup, kernel_size=3, stride=stride, padding=1, bias=False),
        nn.BatchNorm2d(oup),
    )
|
21 |
+
|
22 |
+
def conv_bn1X1(inp, oup, stride, leaky=0):
    """1x1 convolution -> batch norm -> leaky ReLU (used as an FPN lateral)."""
    return nn.Sequential(
        nn.Conv2d(inp, oup, kernel_size=1, stride=stride, padding=0, bias=False),
        nn.BatchNorm2d(oup),
        nn.LeakyReLU(negative_slope=leaky, inplace=True),
    )
|
28 |
+
|
29 |
+
def conv_dw(inp, oup, stride, leaky=0.1):
    """Depthwise-separable conv: depthwise 3x3 then pointwise 1x1, each with BN + leaky ReLU."""
    depthwise = [
        nn.Conv2d(inp, inp, 3, stride, 1, groups=inp, bias=False),
        nn.BatchNorm2d(inp),
        nn.LeakyReLU(negative_slope=leaky, inplace=True),
    ]
    pointwise = [
        nn.Conv2d(inp, oup, 1, 1, 0, bias=False),
        nn.BatchNorm2d(oup),
        nn.LeakyReLU(negative_slope=leaky, inplace=True),
    ]
    return nn.Sequential(*depthwise, *pointwise)
|
39 |
+
|
40 |
+
class SSH(nn.Module):
    """SSH context module: parallel 3x3 / 5x5 / 7x7 receptive-field branches.

    Half of the output channels come from a plain 3x3 branch; the remaining
    half is split between a stacked-3x3 "5x5" branch and a triple-3x3 "7x7"
    branch that reuses the 5x5 branch's first stage. Branch outputs are
    concatenated and passed through ReLU.
    """

    def __init__(self, in_channel, out_channel):
        super(SSH, self).__init__()
        assert out_channel % 4 == 0
        # small channel counts get a non-zero leaky slope
        leaky = 0.1 if out_channel <= 64 else 0
        self.conv3X3 = conv_bn_no_relu(in_channel, out_channel // 2, stride=1)

        self.conv5X5_1 = conv_bn(in_channel, out_channel // 4, stride=1, leaky=leaky)
        self.conv5X5_2 = conv_bn_no_relu(out_channel // 4, out_channel // 4, stride=1)

        self.conv7X7_2 = conv_bn(out_channel // 4, out_channel // 4, stride=1, leaky=leaky)
        self.conv7x7_3 = conv_bn_no_relu(out_channel // 4, out_channel // 4, stride=1)

    def forward(self, input):
        branch3 = self.conv3X3(input)

        stage5 = self.conv5X5_1(input)
        branch5 = self.conv5X5_2(stage5)

        # 7x7 branch grows from the 5x5 branch's intermediate features
        stage7 = self.conv7X7_2(stage5)
        branch7 = self.conv7x7_3(stage7)

        return F.relu(torch.cat([branch3, branch5, branch7], dim=1))
|
67 |
+
|
68 |
+
class FPN(nn.Module):
    """Three-level feature pyramid: 1x1 lateral convs plus a top-down pathway
    with nearest-neighbour upsampling and 3x3 merge convs."""

    def __init__(self, in_channels_list, out_channels):
        super(FPN, self).__init__()
        leaky = 0.1 if out_channels <= 64 else 0
        self.output1 = conv_bn1X1(in_channels_list[0], out_channels, stride=1, leaky=leaky)
        self.output2 = conv_bn1X1(in_channels_list[1], out_channels, stride=1, leaky=leaky)
        self.output3 = conv_bn1X1(in_channels_list[2], out_channels, stride=1, leaky=leaky)

        self.merge1 = conv_bn(out_channels, out_channels, leaky=leaky)
        self.merge2 = conv_bn(out_channels, out_channels, leaky=leaky)

    def forward(self, input):
        # `input` is an ordered mapping of three feature maps (fine to coarse);
        # only the values are used.
        feats = list(input.values())

        output1 = self.output1(feats[0])
        output2 = self.output2(feats[1])
        output3 = self.output3(feats[2])

        # Top-down pathway: upsample the coarser level, add, then merge.
        up3 = F.interpolate(output3, size=[output2.size(2), output2.size(3)], mode="nearest")
        output2 = self.merge2(output2 + up3)

        up2 = F.interpolate(output2, size=[output1.size(2), output1.size(3)], mode="nearest")
        output1 = self.merge1(output1 + up2)

        return [output1, output2, output3]
|
99 |
+
|
100 |
+
|
101 |
+
|
102 |
+
class MobileNetV1(nn.Module):
    """Slim MobileNetV1-style backbone (x0.25 width) with three stages ending
    in a 1000-way classifier head (only used when pretraining)."""

    def __init__(self):
        super(MobileNetV1, self).__init__()
        # (in_channels, out_channels, stride) for each depthwise block.
        stage1_cfg = [(8, 16, 1), (16, 32, 2), (32, 32, 1), (32, 64, 2), (64, 64, 1)]
        stage2_cfg = [(64, 128, 2)] + [(128, 128, 1)] * 5
        stage3_cfg = [(128, 256, 2), (256, 256, 1)]

        self.stage1 = nn.Sequential(
            conv_bn(3, 8, 2, leaky=0.1),
            *[conv_dw(i, o, s) for i, o, s in stage1_cfg],
        )
        self.stage2 = nn.Sequential(*[conv_dw(i, o, s) for i, o, s in stage2_cfg])
        self.stage3 = nn.Sequential(*[conv_dw(i, o, s) for i, o, s in stage3_cfg])
        self.avg = nn.AdaptiveAvgPool2d((1, 1))
        self.fc = nn.Linear(256, 1000)

    def forward(self, x):
        x = self.stage3(self.stage2(self.stage1(x)))
        x = self.avg(x)
        x = x.view(-1, 256)
        return self.fc(x)
|
137 |
+
|
models/retinaface.py
ADDED
@@ -0,0 +1,127 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import torch
|
2 |
+
import torch.nn as nn
|
3 |
+
import torchvision.models.detection.backbone_utils as backbone_utils
|
4 |
+
import torchvision.models._utils as _utils
|
5 |
+
import torch.nn.functional as F
|
6 |
+
from collections import OrderedDict
|
7 |
+
|
8 |
+
from models.net import MobileNetV1 as MobileNetV1
|
9 |
+
from models.net import FPN as FPN
|
10 |
+
from models.net import SSH as SSH
|
11 |
+
|
12 |
+
|
13 |
+
|
14 |
+
class ClassHead(nn.Module):
    """Per-level classification head: 2-way (face / background) scores for
    each anchor at each spatial position."""

    def __init__(self, inchannels=512, num_anchors=3):
        super(ClassHead, self).__init__()
        self.num_anchors = num_anchors
        self.conv1x1 = nn.Conv2d(inchannels, self.num_anchors * 2,
                                 kernel_size=(1, 1), stride=1, padding=0)

    def forward(self, x):
        scores = self.conv1x1(x)
        # (N, A*2, H, W) -> (N, H, W, A*2) -> (N, H*W*A, 2)
        scores = scores.permute(0, 2, 3, 1).contiguous()
        return scores.view(scores.shape[0], -1, 2)
|
25 |
+
|
26 |
+
class BboxHead(nn.Module):
    """Per-level box-regression head: 4 offsets per anchor per position."""

    def __init__(self, inchannels=512, num_anchors=3):
        super(BboxHead, self).__init__()
        self.conv1x1 = nn.Conv2d(inchannels, num_anchors * 4,
                                 kernel_size=(1, 1), stride=1, padding=0)

    def forward(self, x):
        deltas = self.conv1x1(x)
        # (N, A*4, H, W) -> (N, H, W, A*4) -> (N, H*W*A, 4)
        deltas = deltas.permute(0, 2, 3, 1).contiguous()
        return deltas.view(deltas.shape[0], -1, 4)
|
36 |
+
|
37 |
+
class LandmarkHead(nn.Module):
    """Per-level landmark head: 5 (x, y) landmark pairs per anchor."""

    def __init__(self, inchannels=512, num_anchors=3):
        super(LandmarkHead, self).__init__()
        self.conv1x1 = nn.Conv2d(inchannels, num_anchors * 10,
                                 kernel_size=(1, 1), stride=1, padding=0)

    def forward(self, x):
        pts = self.conv1x1(x)
        # (N, A*10, H, W) -> (N, H, W, A*10) -> (N, H*W*A, 10)
        pts = pts.permute(0, 2, 3, 1).contiguous()
        return pts.view(pts.shape[0], -1, 10)
|
47 |
+
|
48 |
+
class RetinaFace(nn.Module):
    def __init__(self, cfg = None, phase = 'train'):
        """
        RetinaFace detector: backbone -> FPN -> SSH -> per-level heads.

        :param cfg: Network related settings; a dict with at least the keys
                    'name', 'pretrain', 'return_layers', 'in_channel' and
                    'out_channel'.
        :param phase: train or test. In test phase the classification scores
                      are passed through softmax.
        :raises ValueError: if cfg['name'] is not a supported backbone.
        """
        super(RetinaFace, self).__init__()
        self.phase = phase
        if cfg['name'] == 'mobilenet0.25':
            backbone = MobileNetV1()
            if cfg['pretrain']:
                checkpoint = torch.load("./weights/mobilenetV1X0.25_pretrain.tar", map_location=torch.device('cpu'))
                # The checkpoint was saved from a DataParallel model; strip the
                # "module." prefix (7 chars) from every parameter name.
                # (OrderedDict is already imported at module level.)
                new_state_dict = OrderedDict()
                for k, v in checkpoint['state_dict'].items():
                    new_state_dict[k[7:]] = v
                # load params
                backbone.load_state_dict(new_state_dict)
        elif cfg['name'] == 'Resnet50':
            import torchvision.models as models
            backbone = models.resnet50(pretrained=cfg['pretrain'])
        else:
            # Fail early with a clear message instead of passing None to
            # IntermediateLayerGetter below.
            raise ValueError(f"Unsupported backbone name: {cfg['name']}")

        self.body = _utils.IntermediateLayerGetter(backbone, cfg['return_layers'])
        in_channels_stage2 = cfg['in_channel']
        in_channels_list = [
            in_channels_stage2 * 2,
            in_channels_stage2 * 4,
            in_channels_stage2 * 8,
        ]
        out_channels = cfg['out_channel']
        self.fpn = FPN(in_channels_list, out_channels)
        self.ssh1 = SSH(out_channels, out_channels)
        self.ssh2 = SSH(out_channels, out_channels)
        self.ssh3 = SSH(out_channels, out_channels)

        self.ClassHead = self._make_class_head(fpn_num=3, inchannels=cfg['out_channel'])
        self.BboxHead = self._make_bbox_head(fpn_num=3, inchannels=cfg['out_channel'])
        self.LandmarkHead = self._make_landmark_head(fpn_num=3, inchannels=cfg['out_channel'])

    def _make_class_head(self, fpn_num=3, inchannels=64, anchor_num=2):
        """Build one ClassHead per FPN level."""
        classhead = nn.ModuleList()
        for i in range(fpn_num):
            classhead.append(ClassHead(inchannels, anchor_num))
        return classhead

    def _make_bbox_head(self, fpn_num=3, inchannels=64, anchor_num=2):
        """Build one BboxHead per FPN level."""
        bboxhead = nn.ModuleList()
        for i in range(fpn_num):
            bboxhead.append(BboxHead(inchannels, anchor_num))
        return bboxhead

    def _make_landmark_head(self, fpn_num=3, inchannels=64, anchor_num=2):
        """Build one LandmarkHead per FPN level."""
        landmarkhead = nn.ModuleList()
        for i in range(fpn_num):
            landmarkhead.append(LandmarkHead(inchannels, anchor_num))
        return landmarkhead

    def forward(self, inputs):
        """Return (bbox_regressions, classifications, ldm_regressions).

        Predictions from the three pyramid levels are concatenated along the
        anchor dimension; classifications are softmaxed in test phase.
        """
        out = self.body(inputs)

        # FPN
        fpn = self.fpn(out)

        # SSH
        feature1 = self.ssh1(fpn[0])
        feature2 = self.ssh2(fpn[1])
        feature3 = self.ssh3(fpn[2])
        features = [feature1, feature2, feature3]

        bbox_regressions = torch.cat([self.BboxHead[i](feature) for i, feature in enumerate(features)], dim=1)
        classifications = torch.cat([self.ClassHead[i](feature) for i, feature in enumerate(features)], dim=1)
        ldm_regressions = torch.cat([self.LandmarkHead[i](feature) for i, feature in enumerate(features)], dim=1)

        if self.phase == 'train':
            output = (bbox_regressions, classifications, ldm_regressions)
        else:
            output = (bbox_regressions, F.softmax(classifications, dim=-1), ldm_regressions)
        return output
|
optimizer/__init__.py
ADDED
@@ -0,0 +1,30 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from torch.optim import SGD
|
2 |
+
from torch.optim import Adam
|
3 |
+
from torch.optim import ASGD
|
4 |
+
from torch.optim import Adamax
|
5 |
+
from torch.optim import Adadelta
|
6 |
+
from torch.optim import Adagrad
|
7 |
+
from torch.optim import RMSprop
|
8 |
+
|
9 |
+
# Registry mapping lower-case optimizer names to torch.optim classes.
key2opt = {
    'sgd': SGD,
    'adam': Adam,
    'asgd': ASGD,
    'adamax': Adamax,
    'adadelta': Adadelta,
    'adagrad': Adagrad,
    'rmsprop': RMSprop,
}


def get_optimizer(optimizer_name=None):
    """Return the optimizer class registered under `optimizer_name`.

    Falls back to SGD when no name is given; raises NotImplementedError for
    an unknown name.
    """
    if optimizer_name is None:
        print("Using default 'SGD' optimizer")
        return SGD

    if optimizer_name not in key2opt:
        raise NotImplementedError(f"Optimizer '{optimizer_name}' not implemented")

    print(f"Using optimizer: '{optimizer_name}'")
    return key2opt[optimizer_name]
|
requirements.txt
ADDED
@@ -0,0 +1,24 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
albumentations==1.3.1
|
2 |
+
bbox==0.9.4
|
3 |
+
Cython==0.29.35
|
4 |
+
ipython==8.14.0
|
5 |
+
matplotlib==3.4.3
|
6 |
+
numpy==1.25.0
|
8 |
+
opencv_python==4.5.5.62
|
9 |
+
opencv_python_headless==4.7.0.72
|
10 |
+
Pillow==9.5.0
|
13 |
+
PyYAML==6.0
|
15 |
+
scikit_learn==1.2.2
|
16 |
+
scipy==1.8.0
|
17 |
+
streamlit==1.24.0
|
18 |
+
tensorboardX==2.6.1
|
19 |
+
timm==0.4.12
|
20 |
+
torch==2.0.1+cu117
|
22 |
+
torchvision==0.15.2+cu117
|
24 |
+
tqdm==4.65.0
|
scheduler/__init__.py
ADDED
@@ -0,0 +1,36 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from torch.optim.lr_scheduler import _LRScheduler
|
2 |
+
from torch.optim.lr_scheduler import StepLR
|
3 |
+
from torch.optim.lr_scheduler import MultiStepLR
|
4 |
+
from torch.optim.lr_scheduler import ExponentialLR
|
5 |
+
from torch.optim.lr_scheduler import CosineAnnealingLR
|
6 |
+
from torch.optim.lr_scheduler import CosineAnnealingWarmRestarts
|
7 |
+
from torch.optim.lr_scheduler import ReduceLROnPlateau
|
8 |
+
|
9 |
+
|
10 |
+
class ConstantLR(_LRScheduler):
    """Scheduler that keeps every parameter group at its base learning rate."""

    def __init__(self, optimizer, last_epoch=-1):
        super(ConstantLR, self).__init__(optimizer, last_epoch)

    def get_lr(self):
        # Learning rates never change; return the base values unchanged.
        return list(self.base_lrs)


# Registry of available learning-rate schedulers.
SCHEDULERS = {
    'ConstantLR': ConstantLR,
    "StepLR": StepLR,
    "MultiStepLR": MultiStepLR,
    "CosineAnnealingLR": CosineAnnealingLR,
    "CosineAnnealingWarmRestarts": CosineAnnealingWarmRestarts,
    "ExponentialLR": ExponentialLR,
    "ReduceLROnPlateau": ReduceLROnPlateau
}


def get_scheduler(optimizer, kwargs):
    """Instantiate a learning-rate scheduler for `optimizer`.

    :param optimizer: the optimizer to schedule.
    :param kwargs: a config dict with a "name" key naming an entry of
                   SCHEDULERS; remaining keys are passed to its constructor.
                   None selects a constant learning rate.
    :raises KeyError: if "name" is missing or not a registered scheduler.
    """
    if kwargs is None:
        print("No lr scheduler is used.")
        return ConstantLR(optimizer)
    # Work on a copy so pop() does not mutate the caller's config dict
    # (the original popped "name" in place, making configs single-use).
    params = dict(kwargs)
    name = params.pop("name")
    print("Using scheduler: '%s' with params: %s" % (name, params))
    return SCHEDULERS[name](optimizer, **params)
|
trainer/__init__.py
ADDED
@@ -0,0 +1,5 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from .abstract_trainer import AbstractTrainer, LEGAL_METRIC
|
2 |
+
from .exp_mgpu_trainer import ExpMultiGpuTrainer
|
3 |
+
from .exp_tester import ExpTester
|
4 |
+
from .utils import center_print, reduce_tensor
|
5 |
+
from .utils import exp_recons_loss
|
trainer/abstract_trainer.py
ADDED
@@ -0,0 +1,100 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import os
|
2 |
+
import torch
|
3 |
+
import random
|
4 |
+
from collections import OrderedDict
|
5 |
+
from torchvision.utils import make_grid
|
6 |
+
|
7 |
+
LEGAL_METRIC = ['Acc', 'AUC', 'LogLoss']
|
8 |
+
|
9 |
+
|
10 |
+
class AbstractTrainer(object):
    """Base class for training / testing drivers.

    Subclasses implement the ``*_settings`` hooks, checkpoint I/O, and the
    ``train`` / ``validate`` / ``test`` loops; this class only validates the
    stage, unpacks the config, and initialises shared state.
    """

    def __init__(self, config, stage="Train"):
        valid_stages = ["Train", "Test"]
        if stage not in valid_stages:
            raise ValueError(f"stage should be in {valid_stages}, but found '{stage}'")

        self.config = config
        model_cfg = config.get("model", None)
        data_cfg = config.get("data", None)
        config_cfg = config.get("config", None)

        # pop() deliberately removes "name" so the remaining model_cfg keys can
        # be forwarded verbatim to the model constructor by subclasses.
        self.model_name = model_cfg.pop("name")

        # Populated later by the *_settings hooks.
        self.gpu = None
        self.dir = None
        self.debug = None
        self.device = None
        self.resume = None
        self.local_rank = None
        self.num_classes = None

        self.best_metric = 0.0
        self.best_step = 1
        self.start_step = 1

        self._initiated_settings(model_cfg, data_cfg, config_cfg)

        if stage == 'Train':
            self._train_settings(model_cfg, data_cfg, config_cfg)
        if stage == 'Test':
            self._test_settings(model_cfg, data_cfg, config_cfg)

    def _initiated_settings(self, model_cfg, data_cfg, config_cfg):
        raise NotImplementedError("Not implemented in abstract class.")

    def _train_settings(self, model_cfg, data_cfg, config_cfg):
        raise NotImplementedError("Not implemented in abstract class.")

    def _test_settings(self, model_cfg, data_cfg, config_cfg):
        raise NotImplementedError("Not implemented in abstract class.")

    def _save_ckpt(self, step, best=False):
        raise NotImplementedError("Not implemented in abstract class.")

    def _load_ckpt(self, best=False, train=False):
        raise NotImplementedError("Not implemented in abstract class.")

    def to_device(self, items):
        """Move every tensor/module in `items` to self.device; returns a list."""
        return [tensor.to(self.device) for tensor in items]

    @staticmethod
    def fixed_randomness():
        """Seed python and torch RNGs (CPU and CUDA) with 0 for reproducibility."""
        random.seed(0)
        torch.manual_seed(0)
        torch.cuda.manual_seed(0)
        torch.cuda.manual_seed_all(0)

    def train(self):
        raise NotImplementedError("Not implemented in abstract class.")

    def validate(self, epoch, step, timer, writer):
        raise NotImplementedError("Not implemented in abstract class.")

    def test(self):
        raise NotImplementedError("Not implemented in abstract class.")

    def plot_figure(self, images, pred, gt, nrow, categories=None, show=True):
        """Render a grid of images titled with predictions vs. ground truth.

        When `show` is True the figure is also saved to <dir>/test_image.png
        and displayed. Returns the matplotlib figure.
        """
        import matplotlib.pyplot as plt
        grid = make_grid(
            images, nrow, padding=4, normalize=True, scale_each=True, pad_value=1)
        # Binary heads emit a single probability; multi-class heads emit logits.
        if self.num_classes == 1:
            pred = (pred >= 0.5).cpu().numpy()
        else:
            pred = pred.argmax(1).cpu().numpy()
        gt = gt.cpu().numpy()
        if categories is not None:
            pred = [categories[i] for i in pred]
            gt = [categories[i] for i in gt]
        grid = grid.permute([1, 2, 0]).cpu().numpy()
        ret = plt.figure()
        plt.imshow(grid)
        plt.title("pred: %s\ngt: %s" % (pred, gt))
        plt.axis("off")
        if show:
            plt.savefig(os.path.join(self.dir, "test_image.png"), dpi=300)
            plt.show()
        plt.close()
        return ret
|
trainer/exp_mgpu_trainer.py
ADDED
@@ -0,0 +1,370 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import os
|
2 |
+
import sys
|
3 |
+
import time
|
4 |
+
import math
|
5 |
+
import yaml
|
6 |
+
import torch
|
7 |
+
import random
|
8 |
+
import numpy as np
|
9 |
+
|
10 |
+
from tqdm import tqdm
|
11 |
+
from pprint import pprint
|
12 |
+
from torch.utils import data
|
13 |
+
import torch.distributed as dist
|
14 |
+
from torch.cuda.amp import autocast, GradScaler
|
15 |
+
from tensorboardX import SummaryWriter
|
16 |
+
|
17 |
+
from dataset import load_dataset
|
18 |
+
from loss import get_loss
|
19 |
+
from model import load_model
|
20 |
+
from optimizer import get_optimizer
|
21 |
+
from scheduler import get_scheduler
|
22 |
+
from trainer import AbstractTrainer, LEGAL_METRIC
|
23 |
+
from trainer.utils import exp_recons_loss, MLLoss, reduce_tensor, center_print
|
24 |
+
from trainer.utils import MODELS_PATH, AccMeter, AUCMeter, AverageMeter, Logger, Timer
|
25 |
+
|
26 |
+
|
27 |
+
class ExpMultiGpuTrainer(AbstractTrainer):
|
28 |
+
    def __init__(self, config, stage="Train"):
        """Run the AbstractTrainer setup hooks, then fix the numpy seed so
        data-side randomness is reproducible across runs."""
        super(ExpMultiGpuTrainer, self).__init__(config, stage)
        np.random.seed(2021)
|
31 |
+
|
32 |
+
def _mprint(self, content=""):
|
33 |
+
if self.local_rank == 0:
|
34 |
+
print(content)
|
35 |
+
|
36 |
+
    def _initiated_settings(self, model_cfg=None, data_cfg=None, config_cfg=None):
        # Only the process-local rank is recorded here; everything else is set
        # up later in _train_settings / _test_settings.
        self.local_rank = config_cfg["local_rank"]
|
38 |
+
|
39 |
+
    def _train_settings(self, model_cfg, data_cfg, config_cfg):
        """Prepare everything needed for distributed training: datasets and
        loaders, the run/logging directory, the DDP-wrapped model, optimizer,
        scheduler, losses, metric meters, and optional checkpoint resumption.
        Validation data and all file-system/log side effects are restricted to
        the master process (local rank 0).
        """
        # debug mode: no log dir, no train_val operation.
        self.debug = config_cfg["debug"]
        self._mprint(f"Using debug mode: {self.debug}.")
        self._mprint("*" * 20)

        self.eval_metric = config_cfg["metric"]
        if self.eval_metric not in LEGAL_METRIC:
            raise ValueError(f"Evaluation metric must be in {LEGAL_METRIC}, but found "
                             f"{self.eval_metric}.")
        # LogLoss (the last legal metric) is lower-is-better, so start the
        # best value from a huge number instead of 0.
        if self.eval_metric == LEGAL_METRIC[-1]:
            self.best_metric = 1.0e8

        # distribution
        dist.init_process_group(config_cfg["distribute"]["backend"])

        # load training dataset
        train_dataset = data_cfg["file"]
        branch = data_cfg["train_branch"]
        name = data_cfg["name"]
        with open(train_dataset, "r") as f:
            options = yaml.load(f, Loader=yaml.FullLoader)
        train_options = options[branch]
        self.train_set = load_dataset(name)(train_options)
        # define training sampler
        self.train_sampler = data.distributed.DistributedSampler(self.train_set)
        # wrapped with data loader
        # NOTE: shuffle must stay False here — the DistributedSampler is
        # responsible for shuffling across processes.
        self.train_loader = data.DataLoader(self.train_set, shuffle=False,
                                            sampler=self.train_sampler,
                                            num_workers=data_cfg.get("num_workers", 4),
                                            batch_size=data_cfg["train_batch_size"])

        if self.local_rank == 0:
            # load validation dataset (only the master process validates)
            val_options = options[data_cfg["val_branch"]]
            self.val_set = load_dataset(name)(val_options)
            # wrapped with data loader
            self.val_loader = data.DataLoader(self.val_set, shuffle=True,
                                              num_workers=data_cfg.get("num_workers", 4),
                                              batch_size=data_cfg["val_batch_size"])

        self.resume = config_cfg.get("resume", False)

        if not self.debug:
            time_format = "%Y-%m-%d...%H.%M.%S"
            run_id = time.strftime(time_format, time.localtime(time.time()))
            self.run_id = config_cfg.get("id", run_id)
            self.dir = os.path.join("runs", self.model_name, self.run_id)

            if self.local_rank == 0:
                if not self.resume:
                    # Refuse to clobber an existing run with the same id.
                    if os.path.exists(self.dir):
                        raise ValueError("Error: given id '%s' already exists." % self.run_id)
                    os.makedirs(self.dir, exist_ok=True)
                    print(f"Writing config file to file directory: {self.dir}.")
                    yaml.dump({"config": self.config,
                               "train_data": train_options,
                               "val_data": val_options},
                              open(os.path.join(self.dir, 'train_config.yml'), 'w'))
                    # copy the script for the training model
                    model_file = MODELS_PATH[self.model_name]
                    os.system("cp " + model_file + " " + self.dir)
                else:
                    print(f"Resuming the history in file directory: {self.dir}.")

                print(f"Logging directory: {self.dir}.")

                # redirect the std out stream
                sys.stdout = Logger(os.path.join(self.dir, 'records.txt'))
                center_print('Train configurations begins.')
                pprint(self.config)
                pprint(train_options)
                pprint(val_options)
                center_print('Train configurations ends.')

        # load model
        self.num_classes = model_cfg["num_classes"]
        self.device = "cuda:" + str(self.local_rank)
        self.model = load_model(self.model_name)(**model_cfg)
        self.model = torch.nn.SyncBatchNorm.convert_sync_batchnorm(self.model).to(self.device)
        self._mprint(f"Using SyncBatchNorm.")
        self.model = torch.nn.parallel.DistributedDataParallel(
            self.model, device_ids=[self.local_rank], find_unused_parameters=True)

        # load optimizer
        optim_cfg = config_cfg.get("optimizer", None)
        optim_name = optim_cfg.pop("name")
        self.optimizer = get_optimizer(optim_name)(self.model.parameters(), **optim_cfg)
        # load scheduler
        self.scheduler = get_scheduler(self.optimizer, config_cfg.get("scheduler", None))
        # load loss
        self.loss_criterion = get_loss(config_cfg.get("loss", None), device=self.device)

        # total number of steps (or epoch) to train
        self.num_steps = train_options["num_steps"]
        self.num_epoch = math.ceil(self.num_steps / len(self.train_loader))

        # the number of steps to write down a log
        self.log_steps = train_options["log_steps"]
        # the number of steps to validate on val dataset once
        self.val_steps = train_options["val_steps"]

        # balance coefficients for the reconstruction / contrastive terms
        self.lambda_1 = config_cfg["lambda_1"]
        self.lambda_2 = config_cfg["lambda_2"]
        self.warmup_step = config_cfg.get('warmup_step', 0)

        self.contra_loss = MLLoss()
        self.acc_meter = AccMeter()
        self.loss_meter = AverageMeter()
        self.recons_loss_meter = AverageMeter()
        self.contra_loss_meter = AverageMeter()

        # only the master process restores from a checkpoint
        if self.resume and self.local_rank == 0:
            self._load_ckpt(best=config_cfg.get("resume_best", False), train=True)
|
154 |
+
|
155 |
+
    def _test_settings(self, model_cfg, data_cfg, config_cfg):
        # Testing is handled by ExpTester, not this trainer.
        # Not used.
        raise NotImplementedError("The function is not intended to be used here.")
|
158 |
+
|
159 |
+
    def _load_ckpt(self, best=False, train=False):
        # Checkpoint loading is handled elsewhere for this trainer.
        # Not used.
        raise NotImplementedError("The function is not intended to be used here.")
|
162 |
+
|
163 |
+
def _save_ckpt(self, step, best=False):
|
164 |
+
save_dir = os.path.join(self.dir, f"best_model_{step}.bin" if best else "latest_model.bin")
|
165 |
+
torch.save({
|
166 |
+
"step": step,
|
167 |
+
"best_step": self.best_step,
|
168 |
+
"best_metric": self.best_metric,
|
169 |
+
"eval_metric": self.eval_metric,
|
170 |
+
"model": self.model.module.state_dict(),
|
171 |
+
"optimizer": self.optimizer.state_dict(),
|
172 |
+
"scheduler": self.scheduler.state_dict(),
|
173 |
+
}, save_dir)
|
174 |
+
|
175 |
+
    def train(self):
        """Main distributed training loop.

        Uses mixed precision (autocast + GradScaler), optional linear LR
        warm-up, loss flooding, and auxiliary reconstruction/contrastive
        losses. Logging, tqdm progress and validation run only on the master
        process. The process group is destroyed on exit or on any exception.
        """
        try:
            timer = Timer()
            grad_scalar = GradScaler(2 ** 10)
            if self.local_rank == 0:
                writer = None if self.debug else SummaryWriter(log_dir=self.dir)
                center_print("Training begins......")
            else:
                writer = None
            start_epoch = self.start_step // len(self.train_loader) + 1
            for epoch_idx in range(start_epoch, self.num_epoch + 1):
                # set sampler
                self.train_sampler.set_epoch(epoch_idx)

                # reset meter
                self.acc_meter.reset()
                self.loss_meter.reset()
                self.recons_loss_meter.reset()
                self.contra_loss_meter.reset()
                self.optimizer.step()

                train_generator = enumerate(self.train_loader, 1)
                # wrap train generator with tqdm for process 0
                if self.local_rank == 0:
                    train_generator = tqdm(train_generator, position=0, leave=True)

                for batch_idx, train_data in train_generator:
                    global_step = (epoch_idx - 1) * len(self.train_loader) + batch_idx
                    self.model.train()
                    I, Y = train_data
                    # load_item turns the raw batch entries into input tensors
                    I = self.train_loader.dataset.load_item(I)
                    in_I, Y = self.to_device((I, Y))

                    # warm-up lr: linearly scale the base lr during warm-up
                    if self.warmup_step != 0 and global_step <= self.warmup_step:
                        lr = self.config['config']['optimizer']['lr'] * float(global_step) / self.warmup_step
                        for param_group in self.optimizer.param_groups:
                            param_group['lr'] = lr

                    self.optimizer.zero_grad()
                    with autocast():
                        Y_pre = self.model(in_I)

                        # for BCE Setting:
                        if self.num_classes == 1:
                            Y_pre = Y_pre.squeeze()
                            loss = self.loss_criterion(Y_pre, Y.float())
                            Y_pre = torch.sigmoid(Y_pre)
                        else:
                            loss = self.loss_criterion(Y_pre, Y)

                        # flood: keep training loss away from zero (b = 0.04)
                        loss = (loss - 0.04).abs() + 0.04
                        recons_loss = exp_recons_loss(self.model.module.loss_inputs['recons'], (in_I, Y))
                        contra_loss = self.contra_loss(self.model.module.loss_inputs['contra'], Y)
                        loss += self.lambda_1 * recons_loss + self.lambda_2 * contra_loss

                    grad_scalar.scale(loss).backward()
                    grad_scalar.step(self.optimizer)
                    grad_scalar.update()
                    # the scheduler only runs once warm-up has finished
                    if self.warmup_step == 0 or global_step > self.warmup_step:
                        self.scheduler.step()

                    self.acc_meter.update(Y_pre, Y, self.num_classes == 1)
                    self.loss_meter.update(reduce_tensor(loss).item())
                    self.recons_loss_meter.update(reduce_tensor(recons_loss).item())
                    self.contra_loss_meter.update(reduce_tensor(contra_loss).item())
                    iter_acc = reduce_tensor(self.acc_meter.mean_acc()).item()

                    if self.local_rank == 0:
                        if global_step % self.log_steps == 0 and writer is not None:
                            writer.add_scalar("train/Acc", iter_acc, global_step)
                            writer.add_scalar("train/Loss", self.loss_meter.avg, global_step)
                            writer.add_scalar("train/Recons_Loss",
                                              self.recons_loss_meter.avg if self.lambda_1 != 0 else 0.,
                                              global_step)
                            writer.add_scalar("train/Contra_Loss", self.contra_loss_meter.avg, global_step)
                            writer.add_scalar("train/LR", self.scheduler.get_last_lr()[0], global_step)

                        # log training step
                        train_generator.set_description(
                            "Train Epoch %d (%d/%d), Global Step %d, Loss %.4f, Recons %.4f, con %.4f, "
                            "ACC %.4f, LR %.6f" % (
                                epoch_idx, batch_idx, len(self.train_loader), global_step,
                                self.loss_meter.avg, self.recons_loss_meter.avg, self.contra_loss_meter.avg,
                                iter_acc, self.scheduler.get_last_lr()[0])
                        )

                        # validating process
                        if global_step % self.val_steps == 0 and not self.debug:
                            print()
                            self.validate(epoch_idx, global_step, timer, writer)

                    # when num_steps has been set and the training process will
                    # be stopped earlier than the specified num_epochs, then stop.
                    # NOTE(review): this early stop appears to run on every rank
                    # (the inner rank check would be redundant otherwise) — confirm
                    # against the original indentation.
                    if self.num_steps is not None and global_step == self.num_steps:
                        if writer is not None:
                            writer.close()
                        if self.local_rank == 0:
                            print()
                            center_print("Training process ends.")
                        dist.destroy_process_group()
                        return
                # close the tqdm bar when one epoch ends
                if self.local_rank == 0:
                    train_generator.close()
                    print()
            # training ends with integer epochs
            if self.local_rank == 0:
                if writer is not None:
                    writer.close()
                center_print("Training process ends.")
            dist.destroy_process_group()
        except Exception as e:
            # make sure the process group is torn down before propagating
            dist.destroy_process_group()
            raise e
|
291 |
+
|
292 |
+
def validate(self, epoch, step, timer, writer):
    """Evaluate the model on the validation set and checkpoint progress.

    Runs a full, gradient-free pass over ``self.val_loader``, tracking
    accuracy, AUC and average loss.  Logs scalars and a sample figure to
    the TensorBoard ``writer`` (if given), saves a "best" checkpoint when
    the configured ``self.eval_metric`` improves, and always saves the
    latest checkpoint at the end.

    Args:
        epoch: current epoch index (progress display only).
        step: current global training step (used as the logging x-axis).
        timer: Timer used to report running/estimated time.
        writer: TensorBoard SummaryWriter, or None to skip logging.
    """
    # Pick one random batch index whose images/reconstructions get plotted.
    v_idx = random.randint(1, len(self.val_loader) + 1)
    categories = self.val_loader.dataset.categories
    self.model.eval()
    with torch.no_grad():
        acc = AccMeter()
        auc = AUCMeter()
        loss_meter = AverageMeter()
        cur_acc = 0.0  # Higher is better
        cur_auc = 0.0  # Higher is better
        cur_loss = 1e8  # Lower is better
        val_generator = tqdm(enumerate(self.val_loader, 1), position=0, leave=True)
        for val_idx, val_data in val_generator:
            I, Y = val_data
            # The loader yields lightweight items; load_item materializes tensors.
            I = self.val_loader.dataset.load_item(I)
            in_I, Y = self.to_device((I, Y))
            Y_pre = self.model(in_I)

            # for BCE Setting:
            if self.num_classes == 1:
                Y_pre = Y_pre.squeeze()
                loss = self.loss_criterion(Y_pre, Y.float())
                Y_pre = torch.sigmoid(Y_pre)
            else:
                loss = self.loss_criterion(Y_pre, Y)

            acc.update(Y_pre, Y, self.num_classes == 1)
            auc.update(Y_pre, Y, self.num_classes == 1)
            loss_meter.update(loss.item())

            cur_acc = acc.mean_acc()
            cur_loss = loss_meter.avg

            val_generator.set_description(
                "Eval Epoch %d (%d/%d), Global Step %d, Loss %.4f, ACC %.4f" % (
                    epoch, val_idx, len(self.val_loader), step,
                    cur_loss, cur_acc)
            )

            # Plot the randomly chosen batch; the first batch is a fallback
            # so that `figure` is always defined before logging below.
            if val_idx == v_idx or val_idx == 1:
                sample_recons = list()
                # Collect the model's cached reconstructions (DDP: .module).
                for _ in self.model.module.loss_inputs['recons']:
                    sample_recons.append(_[:4].to("cpu"))
                # show images
                images = I[:4]
                images = torch.cat([images, *sample_recons], dim=0)
                pred = Y_pre[:4]
                gt = Y[:4]
                figure = self.plot_figure(images, pred, gt, 4, categories, show=False)

        cur_auc = auc.mean_auc()
        print("Eval Epoch %d, Loss %.4f, ACC %.4f, AUC %.4f" % (epoch, cur_loss, cur_acc, cur_auc))
        if writer is not None:
            writer.add_scalar("val/Loss", cur_loss, step)
            writer.add_scalar("val/Acc", cur_acc, step)
            writer.add_scalar("val/AUC", cur_auc, step)
            writer.add_figure("val/Figures", figure, step)
        # record the best acc and the corresponding step
        if self.eval_metric == 'Acc' and cur_acc >= self.best_metric:
            self.best_metric = cur_acc
            self.best_step = step
            self._save_ckpt(step, best=True)
        elif self.eval_metric == 'AUC' and cur_auc >= self.best_metric:
            self.best_metric = cur_auc
            self.best_step = step
            self._save_ckpt(step, best=True)
        elif self.eval_metric == 'LogLoss' and cur_loss <= self.best_metric:
            self.best_metric = cur_loss
            self.best_step = step
            self._save_ckpt(step, best=True)
        print("Best Step %d, Best %s %.4f, Running Time: %s, Estimated Time: %s" % (
            self.best_step, self.eval_metric, self.best_metric,
            timer.measure(), timer.measure(step / self.num_steps)
        ))
        self._save_ckpt(step, best=False)
|
367 |
+
|
368 |
+
def test(self):
|
369 |
+
# Not used.
|
370 |
+
raise NotImplementedError("The function is not intended to be used here.")
|
trainer/exp_tester.py
ADDED
@@ -0,0 +1,144 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import os
|
2 |
+
import sys
|
3 |
+
import yaml
|
4 |
+
import torch
|
5 |
+
import random
|
6 |
+
|
7 |
+
from tqdm import tqdm
|
8 |
+
from pprint import pprint
|
9 |
+
from torch.utils import data
|
10 |
+
|
11 |
+
from dataset import load_dataset
|
12 |
+
from loss import get_loss
|
13 |
+
from model import load_model
|
14 |
+
from model.common import freeze_weights
|
15 |
+
from trainer import AbstractTrainer
|
16 |
+
from trainer.utils import AccMeter, AUCMeter, AverageMeter, Logger, center_print
|
17 |
+
|
18 |
+
|
19 |
+
class ExpTester(AbstractTrainer):
    """Evaluation-only runner.

    Loads a trained checkpoint and computes Loss/ACC/AUC (plus the ROC
    curve / EER) over a held-out test split.  Reuses the AbstractTrainer
    bootstrap, but every training-side hook (``_train_settings``,
    ``_save_ckpt``, ``train``, ``validate``) is explicitly disabled.
    """

    def __init__(self, config, stage="Test"):
        super(ExpTester, self).__init__(config, stage)

        # Move the model to the configured GPU when one is available;
        # otherwise fall back to CPU.
        if torch.cuda.is_available() and self.device is not None:
            print(f"Using cuda device: {self.device}.")
            self.gpu = True
            self.model = self.model.to(self.device)
        else:
            print("Using cpu device.")
            self.device = torch.device("cpu")

    def _initiated_settings(self, model_cfg=None, data_cfg=None, config_cfg=None):
        # Defaults; __init__ decides on the actual device afterwards.
        self.gpu = False
        self.device = config_cfg.get("device", None)

    def _train_settings(self, model_cfg=None, data_cfg=None, config_cfg=None):
        # Not used.
        raise NotImplementedError("The function is not intended to be used here.")

    def _test_settings(self, model_cfg=None, data_cfg=None, config_cfg=None):
        """Build the test loader, model, loss and logging from the configs."""
        # load test dataset
        test_dataset = data_cfg["file"]
        branch = data_cfg["test_branch"]
        name = data_cfg["name"]
        with open(test_dataset, "r") as f:
            options = yaml.load(f, Loader=yaml.FullLoader)
        test_options = options[branch]
        self.test_set = load_dataset(name)(test_options)
        # wrapped with data loader
        self.test_batch_size = data_cfg["test_batch_size"]
        self.test_loader = data.DataLoader(self.test_set, shuffle=False,
                                           batch_size=self.test_batch_size)
        self.run_id = config_cfg["id"]
        self.ckpt_fold = config_cfg.get("ckpt_fold", "runs")
        self.dir = os.path.join(self.ckpt_fold, self.model_name, self.run_id)

        # load model
        self.num_classes = model_cfg["num_classes"]
        self.model = load_model(self.model_name)(**model_cfg)

        # load loss
        self.loss_criterion = get_loss(config_cfg.get("loss", None))

        # redirect the std out stream so results are persisted with the run
        sys.stdout = Logger(os.path.join(self.dir, "test_result.txt"))
        print('Run dir: {}'.format(self.dir))

        center_print('Test configurations begins')
        pprint(self.config)
        pprint(test_options)
        center_print('Test configurations ends')

        self.ckpt = config_cfg.get("ckpt", "best_model")
        self._load_ckpt(best=True, train=False)

    def _save_ckpt(self, step, best=False):
        # Not used.
        raise NotImplementedError("The function is not intended to be used here.")

    def _load_ckpt(self, best=False, train=False):
        """Restore model weights and bookkeeping from a saved checkpoint.

        NOTE: when ``best`` is False the configured ckpt name is ignored
        and "latest_model.bin" is loaded — the conditional binds as
        ``(self.ckpt + ".bin") if best else "latest_model.bin"``.
        """
        load_dir = os.path.join(self.dir, self.ckpt + ".bin" if best else "latest_model.bin")
        load_dict = torch.load(load_dir, map_location=self.device)
        self.start_step = load_dict["step"]
        self.best_step = load_dict["best_step"]
        self.best_metric = load_dict.get("best_metric", None)
        if self.best_metric is None:
            # Older checkpoints stored the metric under "best_acc".
            self.best_metric = load_dict.get("best_acc")
        self.eval_metric = load_dict.get("eval_metric", None)
        if self.eval_metric is None:
            # Fix: the original fell back to load_dict.get("Acc") — a lookup
            # of a non-existent key yielding None.  The intended fallback is
            # the metric *name* "Acc".
            self.eval_metric = "Acc"
        self.model.load_state_dict(load_dict["model"])
        # best_metric may be a plain float (e.g. LogLoss) or a tensor;
        # .item() exists only on the latter.
        best_value = self.best_metric.item() if hasattr(self.best_metric, "item") else self.best_metric
        print(f"Loading checkpoint from {load_dir}, best step: {self.best_step}, "
              f"best {self.eval_metric}: {round(best_value, 4)}.")

    def train(self):
        # Not used.
        raise NotImplementedError("The function is not intended to be used here.")

    def validate(self, epoch, step, timer, writer):
        # Not used.
        raise NotImplementedError("The function is not intended to be used here.")

    def test(self, display_images=False):
        """Run the full test loop; prints final Loss/ACC/AUC and the ROC EER.

        Args:
            display_images: when True, plot one randomly chosen batch.
        """
        freeze_weights(self.model)
        t_idx = random.randint(1, len(self.test_loader) + 1)
        self.fixed_randomness()  # for reproduction

        acc = AccMeter()
        auc = AUCMeter()
        logloss = AverageMeter()
        test_generator = tqdm(enumerate(self.test_loader, 1))
        categories = self.test_loader.dataset.categories
        for idx, test_data in test_generator:
            self.model.eval()
            I, Y = test_data
            I = self.test_loader.dataset.load_item(I)
            if self.gpu:
                in_I, Y = self.to_device((I, Y))
            else:
                in_I, Y = (I, Y)
            Y_pre = self.model(in_I)

            # for BCE Setting:
            if self.num_classes == 1:
                Y_pre = Y_pre.squeeze()
                loss = self.loss_criterion(Y_pre, Y.float())
                Y_pre = torch.sigmoid(Y_pre)
            else:
                loss = self.loss_criterion(Y_pre, Y)

            acc.update(Y_pre, Y, use_bce=self.num_classes == 1)
            auc.update(Y_pre, Y, use_bce=self.num_classes == 1)
            logloss.update(loss.item())

            test_generator.set_description("Test %d/%d" % (idx, len(self.test_loader)))
            if display_images and idx == t_idx:
                # show images
                images = I[:4]
                pred = Y_pre[:4]
                gt = Y[:4]
                self.plot_figure(images, pred, gt, 2, categories)

        print("Test, FINAL LOSS %.4f, FINAL ACC %.4f, FINAL AUC %.4f" %
              (logloss.avg, acc.mean_acc(), auc.mean_auc()))
        auc.curve(self.dir)
|
trainer/utils.py
ADDED
@@ -0,0 +1,183 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import os
|
2 |
+
import sys
|
3 |
+
import time
|
4 |
+
import torch
|
5 |
+
import torch.nn as nn
|
6 |
+
import torch.nn.functional as F
|
7 |
+
import torch.distributed as dist
|
8 |
+
from collections import OrderedDict
|
9 |
+
|
10 |
+
import numpy as np
|
11 |
+
from sklearn.metrics import roc_auc_score, roc_curve
|
12 |
+
from scipy.optimize import brentq
|
13 |
+
from scipy.interpolate import interp1d
|
14 |
+
|
15 |
+
# Tracking the path to the definition of the model.
|
16 |
+
MODELS_PATH = {
|
17 |
+
"Recce": "model/network/Recce.py"
|
18 |
+
}
|
19 |
+
|
20 |
+
|
21 |
+
def exp_recons_loss(recons, x):
    """L1 reconstruction penalty applied to the *real* samples only.

    Args:
        recons: iterable of reconstructed image batches, possibly at a
            lower resolution than the inputs.
        x: tuple ``(images, labels)`` where label 0 marks a real sample
            and label 1 a fake one.

    Returns:
        Scalar tensor summing, over every reconstruction, the mean
        absolute error against the real input images.
    """
    images, labels = x
    total = torch.tensor(0., device=labels.device)
    # Indices of the real samples (label == 0).
    real_index = torch.where(1 - labels)[0]
    for reconstruction in recons:
        if real_index.numel() > 0:
            picked_images = torch.index_select(images, dim=0, index=real_index)
            picked_recons = torch.index_select(reconstruction, dim=0, index=real_index)
            # Upsample the reconstruction to the input resolution first.
            picked_recons = F.interpolate(picked_recons, size=images.shape[-2:],
                                          mode='bilinear', align_corners=True)
            total = total + torch.mean(torch.abs(picked_recons - picked_images))
    return total
|
32 |
+
|
33 |
+
|
34 |
+
def center_print(content, around='*', repeat_around=10):
    """Print ``content`` centered between two runs of the ``around`` char."""
    banner = around * repeat_around
    print(banner + ' %s ' % content + banner)
|
38 |
+
|
39 |
+
|
40 |
+
def reduce_tensor(t):
    """All-reduce a tensor and average it over the distributed world size."""
    averaged = t.clone()
    dist.all_reduce(averaged)
    averaged /= float(dist.get_world_size())
    return averaged
|
45 |
+
|
46 |
+
|
47 |
+
def tensor2image(tensor):
    """Convert a CHW tensor to an HWC numpy array min-max scaled into [0, 1]."""
    array = tensor.permute([1, 2, 0]).cpu().detach().numpy()
    lo, hi = np.min(array), np.max(array)
    return (array - lo) / (hi - lo)
|
50 |
+
|
51 |
+
|
52 |
+
def state_dict(state_dict):
    """ Remove 'module' keyword in state dictionary. """
    # Strips the DataParallel/DDP "module." prefix from every key so the
    # weights can be loaded into a bare (unwrapped) model.
    cleaned = OrderedDict()
    for key, value in state_dict.items():
        cleaned[key.replace("module.", "")] = value
    return cleaned
|
58 |
+
|
59 |
+
|
60 |
+
class Logger(object):
    """Tee for stdout: every message goes to the console AND a log file."""

    def __init__(self, filename):
        # Keep a handle on the real stdout so output is still visible.
        self.terminal = sys.stdout
        self.log = open(filename, "a")

    def write(self, message):
        # Mirror to both sinks; flush the file so logs survive crashes.
        for stream in (self.terminal, self.log):
            stream.write(message)
        self.log.flush()

    def flush(self):
        # Part of the file protocol; flushing happens eagerly in write().
        pass
|
72 |
+
|
73 |
+
|
74 |
+
class Timer(object):
    """Wall-clock timer reporting human-readable elapsed time."""

    def __init__(self):
        # Origin timestamp, fixed at construction.
        self.o = time.time()

    def measure(self, p=1):
        """Return elapsed time divided by ``p``, formatted as h/m/s."""
        elapsed = int((time.time() - self.o) / p)
        if elapsed >= 3600:
            return '{:.1f}h'.format(elapsed / 3600)
        if elapsed >= 60:
            return '{}m'.format(round(elapsed / 60))
        return '{}s'.format(elapsed)
|
88 |
+
|
89 |
+
|
90 |
+
class MLLoss(nn.Module):
    """Metric-learning loss over per-level pairwise similarity matrices.

    For each ``batch_size x batch_size`` similarity matrix in ``input``,
    the mean similarity between pairs with *different* labels should be
    low, and the mean similarity between distinct *real-real* pairs should
    be high; the hinge ``max(1 - sim + diff, 0)`` is summed over all
    matrices.
    """

    def __init__(self):
        super(MLLoss, self).__init__()

    def forward(self, input, target, eps=1e-6):
        # 0 - real; 1 - fake.
        loss = torch.tensor(0., device=target.device)
        batch_size = target.shape[0]
        # Pairwise label grids: mat_1[i, j] = target[i], mat_2[i, j] = target[j].
        mat_1 = torch.hstack([target.unsqueeze(-1)] * batch_size)
        mat_2 = torch.vstack([target] * batch_size)
        # diff_mat[i, j] == 1 where the two samples carry different labels.
        diff_mat = torch.logical_xor(mat_1, mat_2).float()
        or_mat = torch.logical_or(mat_1, mat_2)
        eye = torch.eye(batch_size, device=target.device)
        # Mask out self-pairs and any pair involving a fake sample, so
        # sim_mat[i, j] == 1 only for distinct real-real pairs.
        or_mat = torch.logical_or(or_mat, eye).float()
        sim_mat = 1. - or_mat
        for _ in input:
            # Masked means over the matrix; eps guards an empty mask.
            diff = torch.sum(_ * diff_mat, dim=[0, 1]) / (torch.sum(diff_mat, dim=[0, 1]) + eps)
            sim = torch.sum(_ * sim_mat, dim=[0, 1]) / (torch.sum(sim_mat, dim=[0, 1]) + eps)
            partial_loss = 1. - sim + diff
            # Hinge: only penalize when the margin is violated.
            loss += max(partial_loss, torch.zeros_like(partial_loss))
        return loss
|
111 |
+
|
112 |
+
|
113 |
+
class AccMeter(object):
    """Running classification accuracy accumulated over batches."""

    def __init__(self):
        self.nums = 0  # samples seen so far
        self.acc = 0   # correct predictions so far

    def reset(self):
        """Clear the accumulated counts."""
        self.nums = 0
        self.acc = 0

    def update(self, pred, target, use_bce=False):
        """Fold one batch in; ``use_bce`` means pred is a sigmoid score."""
        labels = (pred >= 0.5).int() if use_bce else pred.argmax(1)
        self.nums += target.shape[0]
        self.acc += torch.sum(labels == target)

    def mean_acc(self):
        """Overall accuracy over everything seen since the last reset."""
        return self.acc / self.nums
|
132 |
+
|
133 |
+
|
134 |
+
class AUCMeter(object):
    """Accumulates scores and labels across batches for ROC-AUC / EER."""

    def __init__(self):
        self.score = None  # concatenated positive-class scores
        self.true = None   # concatenated ground-truth labels

    def reset(self):
        """Drop all accumulated scores and labels."""
        self.score = None
        self.true = None

    def update(self, score, true, use_bce=False):
        """Fold one batch in.

        With ``use_bce`` the scores are taken as-is (already sigmoid
        probabilities); otherwise the softmax probability of class 1 is used.
        """
        if use_bce:
            batch_score = score.detach().cpu().numpy()
        else:
            probs = torch.softmax(score.detach(), dim=-1)
            batch_score = torch.select(probs, 1, 1).cpu().numpy()
        batch_true = true.flatten().cpu().numpy()
        self.score = batch_score if self.score is None else np.concatenate([self.score, batch_score])
        self.true = batch_true if self.true is None else np.concatenate([self.true, batch_true])

    def mean_auc(self):
        """ROC-AUC over everything accumulated so far."""
        return roc_auc_score(self.true, self.score)

    def curve(self, prefix):
        """Print the EER operating point and persist the ROC curve."""
        fpr, tpr, thresholds = roc_curve(self.true, self.score, pos_label=1)
        # EER: the point where false-accept and false-reject rates cross.
        eer = brentq(lambda x: 1. - x - interp1d(fpr, tpr)(x), 0., 1.)
        thresh = interp1d(fpr, thresholds)(eer)
        print(f"# EER: {eer:.4f}(thresh: {thresh:.4f})")
        torch.save([fpr, tpr, thresholds], os.path.join(prefix, "roc_curve.pickle"))
|
162 |
+
|
163 |
+
|
164 |
+
class AverageMeter(object):
    """Computes and stores the average and current value"""

    def __init__(self):
        self.reset()

    def reset(self):
        """Zero every statistic."""
        self.val = 0
        self.avg = 0
        self.sum = 0
        self.count = 0

    def update(self, val, n=1):
        """Record ``val`` with weight ``n`` and refresh the running mean."""
        self.val = val
        self.sum += val * n
        self.count += n
        self.avg = self.sum / self.count
|