HZSDU committed
Commit 475273d · verified · 1 Parent(s): aadbb8e

Add files using upload-large-folder tool

This view is limited to 50 files because it contains too many changes.
Files changed (50)
  1. .gitattributes +32 -0
  2. .gitignore +178 -0
  3. .gradio/certificate.pem +31 -0
  4. .idea/.gitignore +8 -0
  5. .idea/AttentionDistillation-main.iml +8 -0
  6. .idea/deployment.xml +28 -0
  7. .idea/inspectionProfiles/Project_Default.xml +66 -0
  8. .idea/inspectionProfiles/profiles_settings.xml +6 -0
  9. .idea/misc.xml +7 -0
  10. .idea/modules.xml +8 -0
  11. .idea/workspace.xml +224 -0
  12. LICENSE +21 -0
  13. README.md +61 -0
  14. app.py +48 -0
  15. checkpoints/imagenet/hole_benchmark/20250208113201369767.log +5 -0
  16. checkpoints/imagenet/hole_benchmark/20250208141825018139.log +5 -0
  17. checkpoints/imagenet/hole_benchmark/20250208141954613001.log +5 -0
  18. checkpoints/imagenet/hole_benchmark/20250208142058422720.log +274 -0
  19. checkpoints/imagenet/hole_benchmark/20250427163138491612.log +273 -0
  20. checkpoints/imagenet/hole_benchmark/20250427163636067215.log +0 -0
  21. checkpoints/imagenet/hole_benchmark/config.yaml +52 -0
  22. checkpoints/imagenet/hole_benchmark/niter_470000.png +3 -0
  23. checkpoints/imagenet/hole_benchmark/niter_471000.png +3 -0
  24. checkpoints/imagenet/hole_benchmark/niter_472000.png +3 -0
  25. checkpoints/imagenet/hole_benchmark/niter_473000.png +3 -0
  26. checkpoints/imagenet/hole_benchmark/niter_474000.png +3 -0
  27. checkpoints/imagenet/hole_benchmark/niter_475000.png +3 -0
  28. checkpoints/imagenet/hole_benchmark/niter_476000.png +3 -0
  29. checkpoints/imagenet/hole_benchmark/niter_477000.png +3 -0
  30. checkpoints/imagenet/hole_benchmark/niter_478000.png +3 -0
  31. checkpoints/imagenet/hole_benchmark/niter_479000.png +3 -0
  32. checkpoints/imagenet/hole_benchmark/niter_480000.png +3 -0
  33. data/content/1.jpg +3 -0
  34. data/content/11.jpg +3 -0
  35. data/content/13.png +3 -0
  36. data/content/14.jpg +3 -0
  37. data/content/16.jpg +3 -0
  38. data/content/3.jpg +3 -0
  39. data/content/5.png +3 -0
  40. data/content/6.png +3 -0
  41. data/content/8.jpg +3 -0
  42. data/content/9.jpg +3 -0
  43. data/content/deer.jpg +3 -0
  44. data/style/1.jpg +3 -0
  45. data/style/1.png +3 -0
  46. data/style/10.jpg +3 -0
  47. data/style/12.jpg +3 -0
  48. data/style/23.png +3 -0
  49. data/style/3.jpg +3 -0
  50. data/style/5.jpg +3 -0
.gitattributes CHANGED
@@ -33,3 +33,35 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zst filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
+ checkpoints/imagenet/hole_benchmark/niter_470000.png filter=lfs diff=lfs merge=lfs -text
+ checkpoints/imagenet/hole_benchmark/niter_475000.png filter=lfs diff=lfs merge=lfs -text
+ checkpoints/imagenet/hole_benchmark/niter_474000.png filter=lfs diff=lfs merge=lfs -text
+ checkpoints/imagenet/hole_benchmark/niter_473000.png filter=lfs diff=lfs merge=lfs -text
+ checkpoints/imagenet/hole_benchmark/niter_471000.png filter=lfs diff=lfs merge=lfs -text
+ checkpoints/imagenet/hole_benchmark/niter_476000.png filter=lfs diff=lfs merge=lfs -text
+ checkpoints/imagenet/hole_benchmark/niter_472000.png filter=lfs diff=lfs merge=lfs -text
+ checkpoints/imagenet/hole_benchmark/niter_478000.png filter=lfs diff=lfs merge=lfs -text
+ checkpoints/imagenet/hole_benchmark/niter_477000.png filter=lfs diff=lfs merge=lfs -text
+ checkpoints/imagenet/hole_benchmark/niter_480000.png filter=lfs diff=lfs merge=lfs -text
+ checkpoints/imagenet/hole_benchmark/niter_479000.png filter=lfs diff=lfs merge=lfs -text
+ data/content/1.jpg filter=lfs diff=lfs merge=lfs -text
+ data/style/1.jpg filter=lfs diff=lfs merge=lfs -text
+ data/style/1.png filter=lfs diff=lfs merge=lfs -text
+ data/content/11.jpg filter=lfs diff=lfs merge=lfs -text
+ data/content/16.jpg filter=lfs diff=lfs merge=lfs -text
+ data/content/5.png filter=lfs diff=lfs merge=lfs -text
+ data/content/6.png filter=lfs diff=lfs merge=lfs -text
+ data/content/13.png filter=lfs diff=lfs merge=lfs -text
+ data/content/8.jpg filter=lfs diff=lfs merge=lfs -text
+ data/content/3.jpg filter=lfs diff=lfs merge=lfs -text
+ data/content/deer.jpg filter=lfs diff=lfs merge=lfs -text
+ data/style/10.jpg filter=lfs diff=lfs merge=lfs -text
+ data/content/14.jpg filter=lfs diff=lfs merge=lfs -text
+ data/content/9.jpg filter=lfs diff=lfs merge=lfs -text
+ data/style/23.png filter=lfs diff=lfs merge=lfs -text
+ data/style/12.jpg filter=lfs diff=lfs merge=lfs -text
+ data/style/3.jpg filter=lfs diff=lfs merge=lfs -text
+ data/style/5.jpg filter=lfs diff=lfs merge=lfs -text
+ data/style/59.png filter=lfs diff=lfs merge=lfs -text
+ data/texture/14.jpg filter=lfs diff=lfs merge=lfs -text
+ data/texture/15.jpg filter=lfs diff=lfs merge=lfs -text
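For reference, rules like the additions above are what `git lfs track` writes into `.gitattributes`. Below is a minimal Python sketch, not part of this commit, showing how an equivalent rule for one of the new assets could be generated; the helper name `track_with_lfs` is made up for illustration.

```python
import subprocess

def track_with_lfs(path: str) -> None:
    """Ask git-lfs to track `path`; this appends a
    '<path> filter=lfs diff=lfs merge=lfs -text' rule to .gitattributes."""
    subprocess.run(["git", "lfs", "track", path], check=True)

# Example (requires git-lfs to be installed; kept commented out on purpose):
# track_with_lfs("data/content/1.jpg")
```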
.gitignore ADDED
@@ -0,0 +1,178 @@
1
+ # Byte-compiled / optimized / DLL files
2
+ __pycache__/
3
+ *.py[cod]
4
+ *$py.class
5
+
6
+ # C extensions
7
+ *.so
8
+
9
+ # Distribution / packaging
10
+ .Python
11
+ build/
12
+ develop-eggs/
13
+ dist/
14
+ downloads/
15
+ eggs/
16
+ .eggs/
17
+ lib/
18
+ lib64/
19
+ parts/
20
+ sdist/
21
+ var/
22
+ wheels/
23
+ share/python-wheels/
24
+ *.egg-info/
25
+ .installed.cfg
26
+ *.egg
27
+ MANIFEST
28
+
29
+ # PyInstaller
30
+ # Usually these files are written by a python script from a template
31
+ # before PyInstaller builds the exe, so as to inject date/other infos into it.
32
+ *.manifest
33
+ *.spec
34
+
35
+ # Installer logs
36
+ pip-log.txt
37
+ pip-delete-this-directory.txt
38
+
39
+ # Unit test / coverage reports
40
+ htmlcov/
41
+ .tox/
42
+ .nox/
43
+ .coverage
44
+ .coverage.*
45
+ .cache
46
+ nosetests.xml
47
+ coverage.xml
48
+ *.cover
49
+ *.py,cover
50
+ .hypothesis/
51
+ .pytest_cache/
52
+ cover/
53
+
54
+ # Translations
55
+ *.mo
56
+ *.pot
57
+
58
+ # Django stuff:
59
+ *.log
60
+ local_settings.py
61
+ db.sqlite3
62
+ db.sqlite3-journal
63
+
64
+ # Flask stuff:
65
+ instance/
66
+ .webassets-cache
67
+
68
+ # Scrapy stuff:
69
+ .scrapy
70
+
71
+ # Sphinx documentation
72
+ docs/_build/
73
+
74
+ # PyBuilder
75
+ .pybuilder/
76
+ target/
77
+
78
+ # Jupyter Notebook
79
+ .ipynb_checkpoints
80
+
81
+ # IPython
82
+ profile_default/
83
+ ipython_config.py
84
+ test.ipynb
85
+ output.png
86
+ style.png
87
+ content.png
88
+ output_roll.png
89
+
90
+
91
+
92
+ # pyenv
93
+ # For a library or package, you might want to ignore these files since the code is
94
+ # intended to run in multiple environments; otherwise, check them in:
95
+ # .python-version
96
+
97
+ # pipenv
98
+ # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
99
+ # However, in case of collaboration, if having platform-specific dependencies or dependencies
100
+ # having no cross-platform support, pipenv may install dependencies that don't work, or not
101
+ # install all needed dependencies.
102
+ #Pipfile.lock
103
+
104
+ # UV
105
+ # Similar to Pipfile.lock, it is generally recommended to include uv.lock in version control.
106
+ # This is especially recommended for binary packages to ensure reproducibility, and is more
107
+ # commonly ignored for libraries.
108
+ #uv.lock
109
+
110
+ # poetry
111
+ # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
112
+ # This is especially recommended for binary packages to ensure reproducibility, and is more
113
+ # commonly ignored for libraries.
114
+ # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
115
+ #poetry.lock
116
+
117
+ # pdm
118
+ # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
119
+ #pdm.lock
120
+ # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
121
+ # in version control.
122
+ # https://pdm.fming.dev/latest/usage/project/#working-with-version-control
123
+ .pdm.toml
124
+ .pdm-python
125
+ .pdm-build/
126
+
127
+ # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
128
+ __pypackages__/
129
+
130
+ # Celery stuff
131
+ celerybeat-schedule
132
+ celerybeat.pid
133
+
134
+ # SageMath parsed files
135
+ *.sage.py
136
+
137
+ # Environments
138
+ .env
139
+ .venv
140
+ env/
141
+ venv/
142
+ ENV/
143
+ env.bak/
144
+ venv.bak/
145
+
146
+ # Spyder project settings
147
+ .spyderproject
148
+ .spyproject
149
+
150
+ # Rope project settings
151
+ .ropeproject
152
+
153
+ # mkdocs documentation
154
+ /site
155
+
156
+ # mypy
157
+ .mypy_cache/
158
+ .dmypy.json
159
+ dmypy.json
160
+
161
+ # Pyre type checker
162
+ .pyre/
163
+
164
+ # pytype static type analyzer
165
+ .pytype/
166
+
167
+ # Cython debug symbols
168
+ cython_debug/
169
+
170
+ # PyCharm
171
+ # JetBrains specific template is maintained in a separate JetBrains.gitignore that can
172
+ # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
173
+ # and can be added to the global gitignore or merged into this file. For a more nuclear
174
+ # option (not recommended) you can uncomment the following to ignore the entire idea folder.
175
+ #.idea/
176
+
177
+ # PyPI configuration file
178
+ .pypirc
.gradio/certificate.pem ADDED
@@ -0,0 +1,31 @@
1
+ -----BEGIN CERTIFICATE-----
2
+ MIIFazCCA1OgAwIBAgIRAIIQz7DSQONZRGPgu2OCiwAwDQYJKoZIhvcNAQELBQAw
3
+ TzELMAkGA1UEBhMCVVMxKTAnBgNVBAoTIEludGVybmV0IFNlY3VyaXR5IFJlc2Vh
4
+ cmNoIEdyb3VwMRUwEwYDVQQDEwxJU1JHIFJvb3QgWDEwHhcNMTUwNjA0MTEwNDM4
5
+ WhcNMzUwNjA0MTEwNDM4WjBPMQswCQYDVQQGEwJVUzEpMCcGA1UEChMgSW50ZXJu
6
+ ZXQgU2VjdXJpdHkgUmVzZWFyY2ggR3JvdXAxFTATBgNVBAMTDElTUkcgUm9vdCBY
7
+ MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK3oJHP0FDfzm54rVygc
8
+ h77ct984kIxuPOZXoHj3dcKi/vVqbvYATyjb3miGbESTtrFj/RQSa78f0uoxmyF+
9
+ 0TM8ukj13Xnfs7j/EvEhmkvBioZxaUpmZmyPfjxwv60pIgbz5MDmgK7iS4+3mX6U
10
+ A5/TR5d8mUgjU+g4rk8Kb4Mu0UlXjIB0ttov0DiNewNwIRt18jA8+o+u3dpjq+sW
11
+ T8KOEUt+zwvo/7V3LvSye0rgTBIlDHCNAymg4VMk7BPZ7hm/ELNKjD+Jo2FR3qyH
12
+ B5T0Y3HsLuJvW5iB4YlcNHlsdu87kGJ55tukmi8mxdAQ4Q7e2RCOFvu396j3x+UC
13
+ B5iPNgiV5+I3lg02dZ77DnKxHZu8A/lJBdiB3QW0KtZB6awBdpUKD9jf1b0SHzUv
14
+ KBds0pjBqAlkd25HN7rOrFleaJ1/ctaJxQZBKT5ZPt0m9STJEadao0xAH0ahmbWn
15
+ OlFuhjuefXKnEgV4We0+UXgVCwOPjdAvBbI+e0ocS3MFEvzG6uBQE3xDk3SzynTn
16
+ jh8BCNAw1FtxNrQHusEwMFxIt4I7mKZ9YIqioymCzLq9gwQbooMDQaHWBfEbwrbw
17
+ qHyGO0aoSCqI3Haadr8faqU9GY/rOPNk3sgrDQoo//fb4hVC1CLQJ13hef4Y53CI
18
+ rU7m2Ys6xt0nUW7/vGT1M0NPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV
19
+ HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR5tFnme7bl5AFzgAiIyBpY9umbbjANBgkq
20
+ hkiG9w0BAQsFAAOCAgEAVR9YqbyyqFDQDLHYGmkgJykIrGF1XIpu+ILlaS/V9lZL
21
+ ubhzEFnTIZd+50xx+7LSYK05qAvqFyFWhfFQDlnrzuBZ6brJFe+GnY+EgPbk6ZGQ
22
+ 3BebYhtF8GaV0nxvwuo77x/Py9auJ/GpsMiu/X1+mvoiBOv/2X/qkSsisRcOj/KK
23
+ NFtY2PwByVS5uCbMiogziUwthDyC3+6WVwW6LLv3xLfHTjuCvjHIInNzktHCgKQ5
24
+ ORAzI4JMPJ+GslWYHb4phowim57iaztXOoJwTdwJx4nLCgdNbOhdjsnvzqvHu7Ur
25
+ TkXWStAmzOVyyghqpZXjFaH3pO3JLF+l+/+sKAIuvtd7u+Nxe5AW0wdeRlN8NwdC
26
+ jNPElpzVmbUq4JUagEiuTDkHzsxHpFKVK7q4+63SM1N95R1NbdWhscdCb+ZAJzVc
27
+ oyi3B43njTOQ5yOf+1CceWxG1bQVs5ZufpsMljq4Ui0/1lvh+wjChP4kqKOJ2qxq
28
+ 4RgqsahDYVvTH9w7jXbyLeiNdd8XM2w9U/t7y0Ff/9yi0GE44Za4rF2LN9d11TPA
29
+ mRGunUHBcnWEvgJBQl9nJEiU0Zsnvgc/ubhPgXRR4Xq37Z0j4r7g1SgEEzwxA57d
30
+ emyPxgcYxn/eR44/KJ4EBs+lVDR3veyJm+kXQ99b21/+jh5Xos1AnX5iItreGCc=
31
+ -----END CERTIFICATE-----
.idea/.gitignore ADDED
@@ -0,0 +1,8 @@
+ # Default ignored files
+ /shelf/
+ /workspace.xml
+ # Editor-based HTTP Client requests
+ /httpRequests/
+ # Datasource local storage ignored files
+ /dataSources/
+ /dataSources.local.xml
.idea/AttentionDistillation-main.iml ADDED
@@ -0,0 +1,8 @@
+ <?xml version="1.0" encoding="UTF-8"?>
+ <module type="PYTHON_MODULE" version="4">
+   <component name="NewModuleRootManager">
+     <content url="file://$MODULE_DIR$" />
+     <orderEntry type="jdk" jdkName="attention_distillation" jdkType="Python SDK" />
+     <orderEntry type="sourceFolder" forTests="false" />
+   </component>
+ </module>
.idea/deployment.xml ADDED
@@ -0,0 +1,28 @@
1
+ <?xml version="1.0" encoding="UTF-8"?>
2
+ <project version="4">
3
+ <component name="PublishConfigData" remoteFilesAllowedToDisappearOnAutoupload="false">
4
+ <serverData>
5
+ <paths name="root@connect.yza1.seetacloud.com:44585 password">
6
+ <serverdata>
7
+ <mappings>
8
+ <mapping local="$PROJECT_DIR$" web="/" />
9
+ </mappings>
10
+ </serverdata>
11
+ </paths>
12
+ <paths name="root@connect.yza1.seetacloud.com:44585 password (2)">
13
+ <serverdata>
14
+ <mappings>
15
+ <mapping local="$PROJECT_DIR$" web="/" />
16
+ </mappings>
17
+ </serverdata>
18
+ </paths>
19
+ <paths name="root@connect.yza1.seetacloud.com:44585 password (3)">
20
+ <serverdata>
21
+ <mappings>
22
+ <mapping local="$PROJECT_DIR$" web="/" />
23
+ </mappings>
24
+ </serverdata>
25
+ </paths>
26
+ </serverData>
27
+ </component>
28
+ </project>
.idea/inspectionProfiles/Project_Default.xml ADDED
@@ -0,0 +1,66 @@
1
+ <component name="InspectionProjectProfileManager">
2
+ <profile version="1.0">
3
+ <option name="myName" value="Project Default" />
4
+ <inspection_tool class="PyPackageRequirementsInspection" enabled="true" level="WARNING" enabled_by_default="true">
5
+ <option name="ignoredPackages">
6
+ <value>
7
+ <list size="53">
8
+ <item index="0" class="java.lang.String" itemvalue="wandb" />
9
+ <item index="1" class="java.lang.String" itemvalue="torch" />
10
+ <item index="2" class="java.lang.String" itemvalue="visdom" />
11
+ <item index="3" class="java.lang.String" itemvalue="torchvision" />
12
+ <item index="4" class="java.lang.String" itemvalue="dominate" />
13
+ <item index="5" class="java.lang.String" itemvalue="scikit-image" />
14
+ <item index="6" class="java.lang.String" itemvalue="onnxruntime-gpu" />
15
+ <item index="7" class="java.lang.String" itemvalue="httpx" />
16
+ <item index="8" class="java.lang.String" itemvalue="gradio" />
17
+ <item index="9" class="java.lang.String" itemvalue="entmax" />
18
+ <item index="10" class="java.lang.String" itemvalue="PyYAML" />
19
+ <item index="11" class="java.lang.String" itemvalue="xformers" />
20
+ <item index="12" class="java.lang.String" itemvalue="imageio-ffmpeg" />
21
+ <item index="13" class="java.lang.String" itemvalue="numpy" />
22
+ <item index="14" class="java.lang.String" itemvalue="opencv-python-headless" />
23
+ <item index="15" class="java.lang.String" itemvalue="lmdb" />
24
+ <item index="16" class="java.lang.String" itemvalue="submitit" />
25
+ <item index="17" class="java.lang.String" itemvalue="easydict" />
26
+ <item index="18" class="java.lang.String" itemvalue="kornia" />
27
+ <item index="19" class="java.lang.String" itemvalue="ftfy" />
28
+ <item index="20" class="java.lang.String" itemvalue="spacy" />
29
+ <item index="21" class="java.lang.String" itemvalue="pycocoevalcap" />
30
+ <item index="22" class="java.lang.String" itemvalue="safetensors" />
31
+ <item index="23" class="java.lang.String" itemvalue="accelerate" />
32
+ <item index="24" class="java.lang.String" itemvalue="bson" />
33
+ <item index="25" class="java.lang.String" itemvalue="notebook" />
34
+ <item index="26" class="java.lang.String" itemvalue="scipy" />
35
+ <item index="27" class="java.lang.String" itemvalue="transformers" />
36
+ <item index="28" class="java.lang.String" itemvalue="timm" />
37
+ <item index="29" class="java.lang.String" itemvalue="thop" />
38
+ <item index="30" class="java.lang.String" itemvalue="diffusers" />
39
+ <item index="31" class="java.lang.String" itemvalue="k-diffusion" />
40
+ <item index="32" class="java.lang.String" itemvalue="pytorch_lightning" />
41
+ <item index="33" class="java.lang.String" itemvalue="ipykernel" />
42
+ <item index="34" class="java.lang.String" itemvalue="click" />
43
+ <item index="35" class="java.lang.String" itemvalue="omegaconf" />
44
+ <item index="36" class="java.lang.String" itemvalue="albumentations" />
45
+ <item index="37" class="java.lang.String" itemvalue="tqdm" />
46
+ <item index="38" class="java.lang.String" itemvalue="pandas" />
47
+ <item index="39" class="java.lang.String" itemvalue="torch-fidelity" />
48
+ <item index="40" class="java.lang.String" itemvalue="einops-exts" />
49
+ <item index="41" class="java.lang.String" itemvalue="imageio" />
50
+ <item index="42" class="java.lang.String" itemvalue="ninja" />
51
+ <item index="43" class="java.lang.String" itemvalue="pylint" />
52
+ <item index="44" class="java.lang.String" itemvalue="fairscale" />
53
+ <item index="45" class="java.lang.String" itemvalue="pudb" />
54
+ <item index="46" class="java.lang.String" itemvalue="test-tube" />
55
+ <item index="47" class="java.lang.String" itemvalue="matplotlib" />
56
+ <item index="48" class="java.lang.String" itemvalue="webdataset" />
57
+ <item index="49" class="java.lang.String" itemvalue="invisible-watermark" />
58
+ <item index="50" class="java.lang.String" itemvalue="einops" />
59
+ <item index="51" class="java.lang.String" itemvalue="open_clip_torch" />
60
+ <item index="52" class="java.lang.String" itemvalue="decord" />
61
+ </list>
62
+ </value>
63
+ </option>
64
+ </inspection_tool>
65
+ </profile>
66
+ </component>
.idea/inspectionProfiles/profiles_settings.xml ADDED
@@ -0,0 +1,6 @@
+ <component name="InspectionProjectProfileManager">
+   <settings>
+     <option name="USE_PROJECT_PROFILE" value="false" />
+     <version value="1.0" />
+   </settings>
+ </component>
.idea/misc.xml ADDED
@@ -0,0 +1,7 @@
+ <?xml version="1.0" encoding="UTF-8"?>
+ <project version="4">
+   <component name="Black">
+     <option name="sdkName" value="deadiff" />
+   </component>
+   <component name="ProjectRootManager" version="2" project-jdk-name="attention_distillation" project-jdk-type="Python SDK" />
+ </project>
.idea/modules.xml ADDED
@@ -0,0 +1,8 @@
+ <?xml version="1.0" encoding="UTF-8"?>
+ <project version="4">
+   <component name="ProjectModuleManager">
+     <modules>
+       <module fileurl="file://$PROJECT_DIR$/.idea/AttentionDistillation-main.iml" filepath="$PROJECT_DIR$/.idea/AttentionDistillation-main.iml" />
+     </modules>
+   </component>
+ </project>
.idea/workspace.xml ADDED
@@ -0,0 +1,224 @@
1
+ <?xml version="1.0" encoding="UTF-8"?>
2
+ <project version="4">
3
+ <component name="AutoImportSettings">
4
+ <option name="autoReloadType" value="SELECTIVE" />
5
+ </component>
6
+ <component name="ChangeListManager">
7
+ <list default="true" id="1083fbb7-7041-4b55-8c11-ef630eacf05e" name="更改" comment="" />
8
+ <option name="SHOW_DIALOG" value="false" />
9
+ <option name="HIGHLIGHT_CONFLICTS" value="true" />
10
+ <option name="HIGHLIGHT_NON_ACTIVE_CHANGELIST" value="false" />
11
+ <option name="LAST_RESOLUTION" value="IGNORE" />
12
+ </component>
13
+ <component name="FileTemplateManagerImpl">
14
+ <option name="RECENT_TEMPLATES">
15
+ <list>
16
+ <option value="Python Script" />
17
+ </list>
18
+ </option>
19
+ </component>
20
+ <component name="FlaskConsoleOptions" custom-start-script="import sys; print('Python %s on %s' % (sys.version, sys.platform)); sys.path.extend([WORKING_DIR_AND_PYTHON_PATHS])&#10;from flask.cli import ScriptInfo, NoAppException&#10;for module in [&quot;main.py&quot;, &quot;wsgi.py&quot;, &quot;app.py&quot;]:&#10; try: locals().update(ScriptInfo(app_import_path=module, create_app=None).load_app().make_shell_context()); print(&quot;\nFlask App: %s&quot; % app.import_name); break&#10; except NoAppException: pass">
21
+ <envs>
22
+ <env key="FLASK_APP" value="app" />
23
+ </envs>
24
+ <option name="myCustomStartScript" value="import sys; print('Python %s on %s' % (sys.version, sys.platform)); sys.path.extend([WORKING_DIR_AND_PYTHON_PATHS])&#10;from flask.cli import ScriptInfo, NoAppException&#10;for module in [&quot;main.py&quot;, &quot;wsgi.py&quot;, &quot;app.py&quot;]:&#10; try: locals().update(ScriptInfo(app_import_path=module, create_app=None).load_app().make_shell_context()); print(&quot;\nFlask App: %s&quot; % app.import_name); break&#10; except NoAppException: pass" />
25
+ <option name="myEnvs">
26
+ <map>
27
+ <entry key="FLASK_APP" value="app" />
28
+ </map>
29
+ </option>
30
+ </component>
31
+ <component name="ProjectColorInfo">{
32
+ &quot;associatedIndex&quot;: 1
33
+ }</component>
34
+ <component name="ProjectId" id="2ucWH08KyDpGRCOPck6XmbFKMog" />
35
+ <component name="ProjectViewState">
36
+ <option name="hideEmptyMiddlePackages" value="true" />
37
+ <option name="showLibraryContents" value="true" />
38
+ </component>
39
+ <component name="PropertiesComponent"><![CDATA[{
40
+ "keyToString": {
41
+ "Python.app.executor": "Run",
42
+ "Python.image_matting.executor": "Run",
43
+ "Python.mypipeline.executor": "Run",
44
+ "Python.pipeline_sd.executor": "Run",
45
+ "Python.runner.executor": "Debug",
46
+ "Python.tab_generative_inpainting.executor": "Run",
47
+ "Python.train_vae.executor": "Run",
48
+ "Python.upload.executor": "Run",
49
+ "RunOnceActivity.ShowReadmeOnStart": "true",
50
+ "last_opened_file_path": "D:/learn_torch/Exploration_Platform",
51
+ "node.js.detected.package.eslint": "true",
52
+ "node.js.detected.package.tslint": "true",
53
+ "node.js.selected.package.eslint": "(autodetect)",
54
+ "node.js.selected.package.tslint": "(autodetect)",
55
+ "nodejs_package_manager_path": "npm",
56
+ "settings.editor.selected.configurable": "com.jetbrains.python.configuration.PyActiveSdkModuleConfigurable",
57
+ "vue.rearranger.settings.migration": "true"
58
+ }
59
+ }]]></component>
60
+ <component name="RecentsManager">
61
+ <key name="CopyFile.RECENT_KEYS">
62
+ <recent name="D:\learn_torch\Exploration_Platform\sddfrcnn_model\backbone" />
63
+ <recent name="D:\learn_torch\Exploration_Platform\sddfrcnn_model" />
64
+ <recent name="D:\learn_torch\AttentionDistillation-main\cyclegan_model\data" />
65
+ <recent name="D:\learn_torch\AttentionDistillation-main\checkpoints\ostracoda_cyclegan" />
66
+ <recent name="D:\learn_torch\AttentionDistillation-main" />
67
+ </key>
68
+ <key name="MoveFile.RECENT_KEYS">
69
+ <recent name="D:\learn_torch\AttentionDistillation-main\cyclegan_model\data" />
70
+ </key>
71
+ </component>
72
+ <component name="SharedIndexes">
73
+ <attachedChunks>
74
+ <set>
75
+ <option value="bundled-js-predefined-d6986cc7102b-76f8388c3a79-JavaScript-PY-243.24978.54" />
76
+ <option value="bundled-python-sdk-91e3b7efe1d4-466328ff949b-com.jetbrains.pycharm.pro.sharedIndexes.bundled-PY-243.24978.54" />
77
+ </set>
78
+ </attachedChunks>
79
+ </component>
80
+ <component name="SpellCheckerSettings" RuntimeDictionaries="0" Folders="0" CustomDictionaries="0" DefaultDictionary="应用程序级" UseSingleDictionary="true" transferred="true" />
81
+ <component name="TaskManager">
82
+ <task active="true" id="Default" summary="默认任务">
83
+ <changelist id="1083fbb7-7041-4b55-8c11-ef630eacf05e" name="更改" comment="" />
84
+ <created>1742548251150</created>
85
+ <option name="number" value="Default" />
86
+ <option name="presentableId" value="Default" />
87
+ <updated>1742548251150</updated>
88
+ <workItem from="1742548252491" duration="798000" />
89
+ <workItem from="1742549282980" duration="2194000" />
90
+ <workItem from="1742552372742" duration="4696000" />
91
+ <workItem from="1742635841903" duration="325000" />
92
+ <workItem from="1742648621034" duration="595000" />
93
+ <workItem from="1742719699645" duration="2000" />
94
+ <workItem from="1743216840325" duration="1767000" />
95
+ <workItem from="1743662233167" duration="1535000" />
96
+ <workItem from="1744635278552" duration="2431000" />
97
+ <workItem from="1744640273907" duration="3983000" />
98
+ <workItem from="1744678207767" duration="2461000" />
99
+ <workItem from="1744880324290" duration="414000" />
100
+ <workItem from="1744939655177" duration="20359000" />
101
+ <workItem from="1745048987395" duration="17000" />
102
+ <workItem from="1745573844226" duration="4781000" />
103
+ <workItem from="1745663870078" duration="335000" />
104
+ <workItem from="1745718366404" duration="806000" />
105
+ <workItem from="1745808383524" duration="607000" />
106
+ <workItem from="1745838627405" duration="164000" />
107
+ <workItem from="1747998454711" duration="3717000" />
108
+ <workItem from="1748050684818" duration="377000" />
109
+ <workItem from="1748071164733" duration="11000" />
110
+ <workItem from="1748872840035" duration="1408000" />
111
+ <workItem from="1748916777021" duration="72000" />
112
+ <workItem from="1748917369804" duration="41000" />
113
+ <workItem from="1748920842954" duration="417000" />
114
+ <workItem from="1751684711469" duration="341000" />
115
+ <workItem from="1751854455413" duration="3555000" />
116
+ <workItem from="1751869719681" duration="594000" />
117
+ <workItem from="1751871285436" duration="561000" />
118
+ <workItem from="1751956633919" duration="596000" />
119
+ <workItem from="1752137515894" duration="521000" />
120
+ <workItem from="1752392642220" duration="2209000" />
121
+ <workItem from="1752630844455" duration="55000" />
122
+ <workItem from="1753082200556" duration="3000" />
123
+ <workItem from="1753083069650" duration="10735000" />
124
+ <workItem from="1753490990062" duration="5651000" />
125
+ <workItem from="1753497283557" duration="10008000" />
126
+ <workItem from="1753585532274" duration="2068000" />
127
+ <workItem from="1753602012203" duration="7603000" />
128
+ <workItem from="1753685072770" duration="10994000" />
129
+ <workItem from="1753859151250" duration="3339000" />
130
+ <workItem from="1753865998136" duration="1474000" />
131
+ <workItem from="1753927860843" duration="4048000" />
132
+ <workItem from="1753945517428" duration="1929000" />
133
+ <workItem from="1753947728608" duration="5942000" />
134
+ <workItem from="1754032983414" duration="3025000" />
135
+ <workItem from="1754036883225" duration="2942000" />
136
+ <workItem from="1754040777435" duration="4153000" />
137
+ <workItem from="1754047686677" duration="2860000" />
138
+ <workItem from="1754096713959" duration="5860000" />
139
+ <workItem from="1754121251485" duration="1129000" />
140
+ <workItem from="1754302067409" duration="633000" />
141
+ <workItem from="1754305302770" duration="3923000" />
142
+ <workItem from="1755243282650" duration="6000" />
143
+ <workItem from="1755243933862" duration="2853000" />
144
+ <workItem from="1755248870998" duration="5439000" />
145
+ <workItem from="1755308268284" duration="1592000" />
146
+ <workItem from="1755309878783" duration="4531000" />
147
+ <workItem from="1755334170561" duration="1938000" />
148
+ <workItem from="1755402743679" duration="1168000" />
149
+ <workItem from="1755414025669" duration="2007000" />
150
+ <workItem from="1755417642187" duration="6779000" />
151
+ <workItem from="1755505843358" duration="1528000" />
152
+ <workItem from="1756110022009" duration="6857000" />
153
+ <workItem from="1756178619552" duration="1809000" />
154
+ <workItem from="1756212567309" duration="5378000" />
155
+ <workItem from="1756257376979" duration="190000" />
156
+ <workItem from="1756375737510" duration="2088000" />
157
+ <workItem from="1756432285531" duration="7494000" />
158
+ <workItem from="1756454901629" duration="1559000" />
159
+ <workItem from="1756456742831" duration="307000" />
160
+ <workItem from="1756457077399" duration="6213000" />
161
+ <workItem from="1756605762302" duration="4362000" />
162
+ <workItem from="1756693325112" duration="1388000" />
163
+ <workItem from="1756725448037" duration="7909000" />
164
+ <workItem from="1756801208868" duration="8051000" />
165
+ <workItem from="1756812307889" duration="392000" />
166
+ <workItem from="1756985180876" duration="2635000" />
167
+ <workItem from="1757488354834" duration="5339000" />
168
+ <workItem from="1757552553916" duration="2949000" />
169
+ <workItem from="1757573391936" duration="3701000" />
170
+ </task>
171
+ <servers />
172
+ </component>
173
+ <component name="TypeScriptGeneratedFilesManager">
174
+ <option name="version" value="3" />
175
+ </component>
176
+ <component name="XDebuggerManager">
177
+ <breakpoint-manager>
178
+ <breakpoints>
179
+ <line-breakpoint enabled="true" suspend="THREAD" type="python-line">
180
+ <url>file://$PROJECT_DIR$/pipeline_sd.py</url>
181
+ <line>361</line>
182
+ <option name="timeStamp" value="1" />
183
+ </line-breakpoint>
184
+ <line-breakpoint enabled="true" suspend="THREAD" type="python-line">
185
+ <url>file://$PROJECT_DIR$/pipeline_sd.py</url>
186
+ <line>355</line>
187
+ <option name="timeStamp" value="2" />
188
+ </line-breakpoint>
189
+ <line-breakpoint enabled="true" suspend="THREAD" type="python-line">
190
+ <url>file://$PROJECT_DIR$/pipeline_sd.py</url>
191
+ <line>357</line>
192
+ <option name="timeStamp" value="3" />
193
+ </line-breakpoint>
194
+ <line-breakpoint enabled="true" suspend="THREAD" type="python-line">
195
+ <url>file://$PROJECT_DIR$/yolov8_model/ultralytics/trackers/utils/gmc.py</url>
196
+ <line>2</line>
197
+ <option name="timeStamp" value="6" />
198
+ </line-breakpoint>
199
+ <line-breakpoint enabled="true" suspend="THREAD" type="python-line">
200
+ <url>file://$PROJECT_DIR$/yolov8_model/ultralytics/nn/autobackend.py</url>
201
+ <line>532</line>
202
+ <option name="timeStamp" value="10" />
203
+ </line-breakpoint>
204
+ </breakpoints>
205
+ </breakpoint-manager>
206
+ <watches-manager>
207
+ <configuration name="PythonConfigurationType">
208
+ <watch expression="export_formats" />
209
+ </configuration>
210
+ </watches-manager>
211
+ </component>
212
+ <component name="com.intellij.coverage.CoverageDataManagerImpl">
213
+ <SUITE FILE_PATH="coverage/Exploration_Platform$runner.coverage" NAME="runner 覆盖结果" MODIFIED="1756727614244" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="false" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$/webui" />
214
+ <SUITE FILE_PATH="coverage/Exploration_Platform$tab_generative_inpainting.coverage" NAME="tab_generative_inpainting 覆盖结果" MODIFIED="1755415057203" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="false" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$/webui" />
215
+ <SUITE FILE_PATH="coverage/Exploration_Platform$upload.coverage" NAME="upload 覆盖结果" MODIFIED="1757577789232" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="false" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$" />
216
+ <SUITE FILE_PATH="coverage/AttentionDistillation_main$image_matting.coverage" NAME="image_matting 覆盖结果" MODIFIED="1745575524061" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="false" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$/matting" />
217
+ <SUITE FILE_PATH="coverage/AttentionDistillation_main$train_vae.coverage" NAME="train_vae 覆盖结果" MODIFIED="1744957607803" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="false" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$" />
218
+ <SUITE FILE_PATH="coverage/AttentionDistillation_main$app.coverage" NAME="app 覆盖结果" MODIFIED="1754101150864" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="false" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$" />
219
+ <SUITE FILE_PATH="coverage/AttentionDistillation_main$pipeline_sd.coverage" NAME="pipeline_sd 覆盖结果" MODIFIED="1742556501169" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="false" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$" />
220
+ <SUITE FILE_PATH="coverage/Exploration_Platform$app.coverage" NAME="app 覆盖结果" MODIFIED="1757554477400" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="false" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$" />
221
+ <SUITE FILE_PATH="coverage/AttentionDistillation_main$runner.coverage" NAME="runner 覆盖结果" MODIFIED="1753606062538" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="false" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$/webui" />
222
+ <SUITE FILE_PATH="coverage/AttentionDistillation_main$mypipeline.coverage" NAME="mypipeline 覆盖结果" MODIFIED="1744880478148" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="false" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$/flux_ad" />
223
+ </component>
224
+ </project>
LICENSE ADDED
@@ -0,0 +1,21 @@
+ MIT License
+
+ Copyright (c) 2025 gaoxu
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all
+ copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ SOFTWARE.
README.md ADDED
@@ -0,0 +1,61 @@
+ ---
+ title: Exploration_Platform
+ app_file: app.py
+ sdk: gradio
+ sdk_version: 5.32.1
+ ---
+ # Attention Distillation: A Unified Approach to Visual Characteristics Transfer
+
+
+ ### [Project Page](https://xugao97.github.io/AttentionDistillation/) &ensp; [Paper](https://arxiv.org/abs/2502.20235)
+ ![alt text](assets/1.jpg)
+
+ ## 🔥🔥 News
+ * **2025/03/08**: We provide a new notebook with `Style-specific T2I Generation with Flux.1-dev`. See [Issue 1](https://github.com/xugao97/AttentionDistillation/issues/1) for more details.
+
+ * **2025/03/05**: We add `tiling` to enable seamless texture generation. See [Issue 3](https://github.com/xugao97/AttentionDistillation/issues/3) for more details.
+
+ * **2025/03/01**: We provide a simple HuggingFace🤗 demo. Check it out [here](https://huggingface.co/spaces/ccchenzc/AttentionDistillation)!
+
+
+ ## Setup
+
+ This code was tested with Python 3.10, PyTorch 2.5, and Diffusers 0.32.
+
+ ## Examples
+ ### Texture Synthesis
+ - See the [**Texture Synthesis**] part of the [ad] notebook for generating texture images using SD1.5.
+
+ ![alt text](assets/2.jpg)
+
+ ### Style/Appearance Transfer
+ - See the [**Style/Appearance Transfer**] part of the [ad] notebook for style/appearance transfer using SD1.5.
+
+ ![alt text](assets/3.jpg)
+
+ ### Style-specific T2I Generation
+ - See the [**Style-specific T2I Generation**] part of the [ad] notebook for style-specific T2I generation using SD1.5 or SDXL.
+
+ ![alt text](assets/4.jpg)
+
+ [ad]: ad.ipynb
+
+
+ ### VAE Finetuning
+
+ ```bash
+ python train_vae.py \
+     --image_path=/path/to/image \
+     --vae_model_path=/path/to/vae
+ ```
+
+
+ ### Web UI
+ Run the following command to start the Web UI:
+ ```bash
+ python app.py
+ ```
+ The Web UI will be available at [http://localhost:7860](http://localhost:7860).
+
+ ### ComfyUI
+ We also provide an implementation of Attention Distillation for ComfyUI. For more details, see [here](https://github.com/zichongc/ComfyUI-Attention-Distillation).
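The README's Setup section only states the tested stack (Python 3.10, PyTorch 2.5, Diffusers 0.32). An optional pre-flight check like the sketch below can catch an environment mismatch before launching the notebook or `app.py`; it is illustrative, not part of the repository, and the version thresholds are assumptions taken from the "tested with" line above.

```python
import sys

def check_environment() -> None:
    """Optional sanity check against the versions listed in Setup."""
    assert sys.version_info >= (3, 10), "Python 3.10+ was used for testing"
    import torch
    import diffusers
    print("torch:", torch.__version__)          # tested with 2.5
    print("diffusers:", diffusers.__version__)  # tested with 0.32

if __name__ == "__main__":
    check_environment()
```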
app.py ADDED
@@ -0,0 +1,48 @@
+ import gradio as gr
+ from webui import (
+     create_interface_sddfrcnn,
+     create_interface_cyclegan,
+     create_interactive_generative_inpainting,
+     create_interface_style_transfer,
+     create_interface_yolov8
+ )
+ from webui.runner import AttentionRunner, InpaintingRunner, CycleGANRunner, SDDFRCNNRunner, YOLORunner
+ import os
+ os.environ["no_proxy"] = "localhost,127.0.0.1,::1"
+
+
+ def main():
+     attention_runner = AttentionRunner()
+     inpainting_runner = InpaintingRunner()
+     cyclegan_runner = CycleGANRunner()
+     sddfrcnn_runner = SDDFRCNNRunner()
+     yolo_runner = YOLORunner()
+
+     with gr.Blocks(analytics_enabled=False,
+                    title='Mars Life Exploration Platform',
+                    ) as demo:
+         md_txt = "# Mars Life Exploration Platform" \
+                  "\nA comprehensive platform for exploring Martian life: discover possible organisms in samples, restore them, and try to reconstruct how they looked in life."
+         gr.Markdown(md_txt)
+         with gr.Tabs(selected='tab_sdd&frcnn'):
+             with gr.TabItem("SDD & FRCNN", id='tab_sdd&frcnn'):
+                 create_interface_sddfrcnn(sddfrcnn_runner)
+
+             with gr.TabItem("YOLOv8", id='tab_yolov8'):
+                 create_interface_yolov8(yolo_runner)
+
+             with gr.TabItem("Generative Inpainting", id='tab_generative_inpainting'):
+                 create_interactive_generative_inpainting(inpainting_runner)
+
+             with gr.TabItem("Style Transfer", id='tab_style_transfer'):
+                 create_interface_style_transfer(runner=attention_runner)
+
+             with gr.TabItem("CycleGAN", id='tab_cyclegan'):
+                 create_interface_cyclegan(runner=cyclegan_runner)
+
+     # demo.queue().launch()
+     demo.launch(share=True, debug=False)
+
+
+ if __name__ == '__main__':
+     main()
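app.py only wires runner objects into tab factories imported from the `webui` package, which is not included in this 50-file view. As a rough illustration of that pattern, one factory might look like the sketch below; all component names and the `runner.run` method are assumptions, not the actual `webui` API.

```python
import gradio as gr

def create_interface_style_transfer(runner):
    """Hypothetical sketch of a tab factory: build components inside the
    caller's gr.Blocks scope and bind them to the runner."""
    with gr.Row():
        content = gr.Image(label="Content image", type="pil")
        style = gr.Image(label="Style image", type="pil")
        output = gr.Image(label="Stylized result")
    run_btn = gr.Button("Run style transfer")
    # `runner.run` is an assumed method name; the real signature lives in
    # webui/runner.py, which is not part of this commit view.
    run_btn.click(fn=runner.run, inputs=[content, style], outputs=output)
```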
checkpoints/imagenet/hole_benchmark/20250208113201369767.log ADDED
@@ -0,0 +1,5 @@
+ 2025-02-08 11:32:01,370 INFO Arguments: Namespace(config='configs/config.yaml', seed=None)
+ 2025-02-08 11:32:01,370 INFO Random seed: 3449
+ 2025-02-08 11:32:01,371 INFO Configuration: {'dataset_name': 'imagenet', 'data_with_subfolder': 'ture', 'train_data_path': '/media/ouc/4T_A/datasets/ImageNet/ILSVRC2012_img_train/', 'val_data_path': None, 'resume': None, 'batch_size': 48, 'image_shape': [256, 256, 3], 'mask_shape': [128, 128], 'mask_batch_same': True, 'max_delta_shape': [32, 32], 'margin': [0, 0], 'discounted_mask': True, 'spatial_discounting_gamma': 0.9, 'random_crop': True, 'mask_type': 'hole', 'mosaic_unit_size': 12, 'expname': 'benchmark', 'cuda': True, 'gpu_ids': [0], 'num_workers': 4, 'lr': 0.0001, 'beta1': 0.5, 'beta2': 0.9, 'n_critic': 5, 'niter': 500000, 'print_iter': 100, 'viz_iter': 1000, 'viz_max_out': 16, 'snapshot_save_iter': 5000, 'coarse_l1_alpha': 1.2, 'l1_loss_alpha': 1.2, 'ae_loss_alpha': 1.2, 'global_wgan_loss_alpha': 1.0, 'gan_loss_alpha': 0.001, 'wgan_gp_lambda': 10, 'netG': {'input_dim': 3, 'ngf': 32}, 'netD': {'input_dim': 3, 'ndf': 64}}
+ 2025-02-08 11:32:01,371 INFO Training on dataset: imagenet
+ 2025-02-08 11:32:01,373 ERROR [WinError 3] The system cannot find the path specified: '/media/ouc/4T_A/datasets/ImageNet/ILSVRC2012_img_train/'
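The error in this log is Windows' "path not found" message (originally mis-encoded from a Chinese-locale console): the Linux dataset path left in the config does not exist on this machine. A guard along the lines of the sketch below, run before building the dataset, makes the failure explicit; it is illustrative only and not code from the repository.

```python
from pathlib import Path

def require_dataset_dir(path: str) -> Path:
    """Fail early with a clear message if the configured data path is missing."""
    p = Path(path)
    if not p.is_dir():
        raise FileNotFoundError(f"train_data_path does not exist: {p}")
    return p

# Example with the path from the log above (would fail on this Windows host):
# require_dataset_dir('/media/ouc/4T_A/datasets/ImageNet/ILSVRC2012_img_train/')
```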
checkpoints/imagenet/hole_benchmark/20250208141825018139.log ADDED
@@ -0,0 +1,5 @@
+ 2025-02-08 14:18:25,018 INFO Arguments: Namespace(config='configs/config.yaml', seed=None)
+ 2025-02-08 14:18:25,019 INFO Random seed: 4990
+ 2025-02-08 14:18:25,020 INFO Configuration: {'dataset_name': 'imagenet', 'data_with_subfolder': 'ture', 'train_data_path': 'G:/generative-inpainting-pytorch-master/traindata/train', 'val_data_path': None, 'resume': 'D:\\generative-inpainting-pytorch-master\\checkpoints\\imagenet\\hole_benchmark', 'batch_size': 4, 'image_shape': [256, 256, 3], 'mask_shape': [128, 128], 'mask_batch_same': True, 'max_delta_shape': [32, 32], 'margin': [0, 0], 'discounted_mask': True, 'spatial_discounting_gamma': 0.9, 'random_crop': True, 'mask_type': 'hole', 'mosaic_unit_size': 12, 'expname': 'benchmark', 'cuda': True, 'gpu_ids': [0], 'num_workers': 4, 'lr': 0.0001, 'beta1': 0.5, 'beta2': 0.9, 'n_critic': 5, 'niter': 500000, 'print_iter': 100, 'viz_iter': 1000, 'viz_max_out': 16, 'snapshot_save_iter': 5000, 'coarse_l1_alpha': 1.2, 'l1_loss_alpha': 1.2, 'ae_loss_alpha': 1.2, 'global_wgan_loss_alpha': 1.0, 'gan_loss_alpha': 0.001, 'wgan_gp_lambda': 10, 'netG': {'input_dim': 3, 'ngf': 32}, 'netD': {'input_dim': 3, 'ndf': 64}}
+ 2025-02-08 14:18:25,020 INFO Training on dataset: imagenet
+ 2025-02-08 14:18:25,021 ERROR num_samples should be a positive integer value, but got num_samples=0
checkpoints/imagenet/hole_benchmark/20250208141954613001.log ADDED
@@ -0,0 +1,5 @@
+ 2025-02-08 14:19:54,614 INFO Arguments: Namespace(config='configs/config.yaml', seed=None)
+ 2025-02-08 14:19:54,614 INFO Random seed: 7436
+ 2025-02-08 14:19:54,615 INFO Configuration: {'dataset_name': 'imagenet', 'data_with_subfolder': 'ture', 'train_data_path': 'G:/generative-inpainting-pytorch-master/traindata/train', 'val_data_path': 'G:/generative-inpainting-pytorch-master/traindata/val', 'resume': 'D:\\generative-inpainting-pytorch-master\\checkpoints\\imagenet\\hole_benchmark', 'batch_size': 4, 'image_shape': [256, 256, 3], 'mask_shape': [128, 128], 'mask_batch_same': True, 'max_delta_shape': [32, 32], 'margin': [0, 0], 'discounted_mask': True, 'spatial_discounting_gamma': 0.9, 'random_crop': True, 'mask_type': 'hole', 'mosaic_unit_size': 12, 'expname': 'benchmark', 'cuda': True, 'gpu_ids': [0], 'num_workers': 4, 'lr': 3e-05, 'beta1': 0.5, 'beta2': 0.9, 'n_critic': 5, 'niter': 430500, 'print_iter': 100, 'viz_iter': 1000, 'viz_max_out': 16, 'snapshot_save_iter': 5000, 'coarse_l1_alpha': 1.2, 'l1_loss_alpha': 1.2, 'ae_loss_alpha': 1.2, 'global_wgan_loss_alpha': 1.0, 'gan_loss_alpha': 0.001, 'wgan_gp_lambda': 10, 'netG': {'input_dim': 3, 'ngf': 32}, 'netD': {'input_dim': 3, 'ndf': 64}}
+ 2025-02-08 14:19:54,616 INFO Training on dataset: imagenet
+ 2025-02-08 14:19:54,618 ERROR num_samples should be a positive integer value, but got num_samples=0
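The `num_samples=0` errors in the two logs above come from handing an empty dataset to a random sampler, typically because the train/val folders were empty or laid out differently than the loader expects (note that the logged config stores `data_with_subfolder` as the string `'ture'` rather than a boolean). A hedged sketch of a guard that surfaces this earlier, not code from the repository:

```python
from torch.utils.data import DataLoader, Dataset

def make_loader(dataset: Dataset, batch_size: int) -> DataLoader:
    """Raise a descriptive error instead of the opaque num_samples=0 failure."""
    if len(dataset) == 0:
        raise RuntimeError(
            "Dataset is empty; check train_data_path / val_data_path and the "
            "data_with_subfolder flag before training."
        )
    return DataLoader(dataset, batch_size=batch_size, shuffle=True)
```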
checkpoints/imagenet/hole_benchmark/20250208142058422720.log ADDED
@@ -0,0 +1,274 @@
1
+ 2025-02-08 14:20:58,423 INFO Arguments: Namespace(config='configs/config.yaml', seed=None)
2
+ 2025-02-08 14:20:58,423 INFO Random seed: 2437
3
+ 2025-02-08 14:20:58,424 INFO Configuration: {'dataset_name': 'imagenet', 'data_with_subfolder': False, 'train_data_path': 'G:/generative-inpainting-pytorch-master/traindata/train', 'val_data_path': 'G:/generative-inpainting-pytorch-master/traindata/val', 'resume': 'D:\\generative-inpainting-pytorch-master\\checkpoints\\imagenet\\hole_benchmark', 'batch_size': 4, 'image_shape': [256, 256, 3], 'mask_shape': [128, 128], 'mask_batch_same': True, 'max_delta_shape': [32, 32], 'margin': [0, 0], 'discounted_mask': True, 'spatial_discounting_gamma': 0.9, 'random_crop': True, 'mask_type': 'hole', 'mosaic_unit_size': 12, 'expname': 'benchmark', 'cuda': True, 'gpu_ids': [0], 'num_workers': 4, 'lr': 3e-05, 'beta1': 0.5, 'beta2': 0.9, 'n_critic': 5, 'niter': 430500, 'print_iter': 100, 'viz_iter': 1000, 'viz_max_out': 16, 'snapshot_save_iter': 5000, 'coarse_l1_alpha': 1.2, 'l1_loss_alpha': 1.2, 'ae_loss_alpha': 1.2, 'global_wgan_loss_alpha': 1.0, 'gan_loss_alpha': 0.001, 'wgan_gp_lambda': 10, 'netG': {'input_dim': 3, 'ngf': 32}, 'netD': {'input_dim': 3, 'ndf': 64}}
4
+ 2025-02-08 14:20:58,424 INFO Training on dataset: imagenet
5
+ 2025-02-08 14:20:59,771 INFO
6
+ Generator(
7
+ (coarse_generator): CoarseGenerator(
8
+ (conv1): Conv2dBlock(
9
+ (pad): ZeroPad2d(padding=(0, 0, 0, 0), value=0.0)
10
+ (activation): ELU(alpha=1.0, inplace=True)
11
+ (conv): Conv2d(5, 32, kernel_size=(5, 5), stride=(1, 1), padding=(2, 2))
12
+ )
13
+ (conv2_downsample): Conv2dBlock(
14
+ (pad): ZeroPad2d(padding=(0, 0, 0, 0), value=0.0)
15
+ (activation): ELU(alpha=1.0, inplace=True)
16
+ (conv): Conv2d(32, 64, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1))
17
+ )
18
+ (conv3): Conv2dBlock(
19
+ (pad): ZeroPad2d(padding=(0, 0, 0, 0), value=0.0)
20
+ (activation): ELU(alpha=1.0, inplace=True)
21
+ (conv): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
22
+ )
23
+ (conv4_downsample): Conv2dBlock(
24
+ (pad): ZeroPad2d(padding=(0, 0, 0, 0), value=0.0)
25
+ (activation): ELU(alpha=1.0, inplace=True)
26
+ (conv): Conv2d(64, 128, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1))
27
+ )
28
+ (conv5): Conv2dBlock(
29
+ (pad): ZeroPad2d(padding=(0, 0, 0, 0), value=0.0)
30
+ (activation): ELU(alpha=1.0, inplace=True)
31
+ (conv): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
32
+ )
33
+ (conv6): Conv2dBlock(
34
+ (pad): ZeroPad2d(padding=(0, 0, 0, 0), value=0.0)
35
+ (activation): ELU(alpha=1.0, inplace=True)
36
+ (conv): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
37
+ )
38
+ (conv7_atrous): Conv2dBlock(
39
+ (pad): ZeroPad2d(padding=(0, 0, 0, 0), value=0.0)
40
+ (activation): ELU(alpha=1.0, inplace=True)
41
+ (conv): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(2, 2), dilation=(2, 2))
42
+ )
43
+ (conv8_atrous): Conv2dBlock(
44
+ (pad): ZeroPad2d(padding=(0, 0, 0, 0), value=0.0)
45
+ (activation): ELU(alpha=1.0, inplace=True)
46
+ (conv): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(4, 4), dilation=(4, 4))
47
+ )
48
+ (conv9_atrous): Conv2dBlock(
49
+ (pad): ZeroPad2d(padding=(0, 0, 0, 0), value=0.0)
50
+ (activation): ELU(alpha=1.0, inplace=True)
51
+ (conv): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(8, 8), dilation=(8, 8))
52
+ )
53
+ (conv10_atrous): Conv2dBlock(
54
+ (pad): ZeroPad2d(padding=(0, 0, 0, 0), value=0.0)
55
+ (activation): ELU(alpha=1.0, inplace=True)
56
+ (conv): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(16, 16), dilation=(16, 16))
57
+ )
58
+ (conv11): Conv2dBlock(
59
+ (pad): ZeroPad2d(padding=(0, 0, 0, 0), value=0.0)
60
+ (activation): ELU(alpha=1.0, inplace=True)
61
+ (conv): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
62
+ )
63
+ (conv12): Conv2dBlock(
64
+ (pad): ZeroPad2d(padding=(0, 0, 0, 0), value=0.0)
65
+ (activation): ELU(alpha=1.0, inplace=True)
66
+ (conv): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
67
+ )
68
+ (conv13): Conv2dBlock(
69
+ (pad): ZeroPad2d(padding=(0, 0, 0, 0), value=0.0)
70
+ (activation): ELU(alpha=1.0, inplace=True)
71
+ (conv): Conv2d(128, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
72
+ )
73
+ (conv14): Conv2dBlock(
74
+ (pad): ZeroPad2d(padding=(0, 0, 0, 0), value=0.0)
75
+ (activation): ELU(alpha=1.0, inplace=True)
76
+ (conv): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
77
+ )
78
+ (conv15): Conv2dBlock(
79
+ (pad): ZeroPad2d(padding=(0, 0, 0, 0), value=0.0)
80
+ (activation): ELU(alpha=1.0, inplace=True)
81
+ (conv): Conv2d(64, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
82
+ )
83
+ (conv16): Conv2dBlock(
84
+ (pad): ZeroPad2d(padding=(0, 0, 0, 0), value=0.0)
85
+ (activation): ELU(alpha=1.0, inplace=True)
86
+ (conv): Conv2d(32, 16, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
87
+ )
88
+ (conv17): Conv2dBlock(
89
+ (pad): ZeroPad2d(padding=(0, 0, 0, 0), value=0.0)
90
+ (conv): Conv2d(16, 3, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
91
+ )
92
+ )
93
+ (fine_generator): FineGenerator(
94
+ (conv1): Conv2dBlock(
95
+ (pad): ZeroPad2d(padding=(0, 0, 0, 0), value=0.0)
96
+ (activation): ELU(alpha=1.0, inplace=True)
97
+ (conv): Conv2d(5, 32, kernel_size=(5, 5), stride=(1, 1), padding=(2, 2))
98
+ )
99
+ (conv2_downsample): Conv2dBlock(
100
+ (pad): ZeroPad2d(padding=(0, 0, 0, 0), value=0.0)
101
+ (activation): ELU(alpha=1.0, inplace=True)
102
+ (conv): Conv2d(32, 32, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1))
103
+ )
104
+ (conv3): Conv2dBlock(
105
+ (pad): ZeroPad2d(padding=(0, 0, 0, 0), value=0.0)
106
+ (activation): ELU(alpha=1.0, inplace=True)
107
+ (conv): Conv2d(32, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
108
+ )
109
+ (conv4_downsample): Conv2dBlock(
110
+ (pad): ZeroPad2d(padding=(0, 0, 0, 0), value=0.0)
111
+ (activation): ELU(alpha=1.0, inplace=True)
112
+ (conv): Conv2d(64, 64, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1))
113
+ )
114
+ (conv5): Conv2dBlock(
115
+ (pad): ZeroPad2d(padding=(0, 0, 0, 0), value=0.0)
116
+ (activation): ELU(alpha=1.0, inplace=True)
117
+ (conv): Conv2d(64, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
118
+ )
119
+ (conv6): Conv2dBlock(
120
+ (pad): ZeroPad2d(padding=(0, 0, 0, 0), value=0.0)
121
+ (activation): ELU(alpha=1.0, inplace=True)
122
+ (conv): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
123
+ )
124
+ (conv7_atrous): Conv2dBlock(
125
+ (pad): ZeroPad2d(padding=(0, 0, 0, 0), value=0.0)
126
+ (activation): ELU(alpha=1.0, inplace=True)
127
+ (conv): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(2, 2), dilation=(2, 2))
128
+ )
129
+ (conv8_atrous): Conv2dBlock(
130
+ (pad): ZeroPad2d(padding=(0, 0, 0, 0), value=0.0)
131
+ (activation): ELU(alpha=1.0, inplace=True)
132
+ (conv): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(4, 4), dilation=(4, 4))
133
+ )
134
+ (conv9_atrous): Conv2dBlock(
135
+ (pad): ZeroPad2d(padding=(0, 0, 0, 0), value=0.0)
136
+ (activation): ELU(alpha=1.0, inplace=True)
137
+ (conv): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(8, 8), dilation=(8, 8))
138
+ )
139
+ (conv10_atrous): Conv2dBlock(
140
+ (pad): ZeroPad2d(padding=(0, 0, 0, 0), value=0.0)
141
+ (activation): ELU(alpha=1.0, inplace=True)
142
+ (conv): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(16, 16), dilation=(16, 16))
143
+ )
144
+ (pmconv1): Conv2dBlock(
145
+ (pad): ZeroPad2d(padding=(0, 0, 0, 0), value=0.0)
146
+ (activation): ELU(alpha=1.0, inplace=True)
147
+ (conv): Conv2d(5, 32, kernel_size=(5, 5), stride=(1, 1), padding=(2, 2))
148
+ )
149
+ (pmconv2_downsample): Conv2dBlock(
150
+ (pad): ZeroPad2d(padding=(0, 0, 0, 0), value=0.0)
151
+ (activation): ELU(alpha=1.0, inplace=True)
152
+ (conv): Conv2d(32, 32, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1))
153
+ )
154
+ (pmconv3): Conv2dBlock(
155
+ (pad): ZeroPad2d(padding=(0, 0, 0, 0), value=0.0)
156
+ (activation): ELU(alpha=1.0, inplace=True)
157
+ (conv): Conv2d(32, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
158
+ )
159
+ (pmconv4_downsample): Conv2dBlock(
160
+ (pad): ZeroPad2d(padding=(0, 0, 0, 0), value=0.0)
161
+ (activation): ELU(alpha=1.0, inplace=True)
162
+ (conv): Conv2d(64, 128, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1))
163
+ )
164
+ (pmconv5): Conv2dBlock(
165
+ (pad): ZeroPad2d(padding=(0, 0, 0, 0), value=0.0)
166
+ (activation): ELU(alpha=1.0, inplace=True)
167
+ (conv): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
168
+ )
169
+ (pmconv6): Conv2dBlock(
170
+ (pad): ZeroPad2d(padding=(0, 0, 0, 0), value=0.0)
171
+ (activation): ReLU(inplace=True)
172
+ (conv): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
173
+ )
174
+ (contextul_attention): ContextualAttention()
175
+ (pmconv9): Conv2dBlock(
176
+ (pad): ZeroPad2d(padding=(0, 0, 0, 0), value=0.0)
177
+ (activation): ELU(alpha=1.0, inplace=True)
178
+ (conv): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
179
+ )
180
+ (pmconv10): Conv2dBlock(
181
+ (pad): ZeroPad2d(padding=(0, 0, 0, 0), value=0.0)
182
+ (activation): ELU(alpha=1.0, inplace=True)
183
+ (conv): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
184
+ )
185
+ (allconv11): Conv2dBlock(
186
+ (pad): ZeroPad2d(padding=(0, 0, 0, 0), value=0.0)
187
+ (activation): ELU(alpha=1.0, inplace=True)
188
+ (conv): Conv2d(256, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
189
+ )
190
+ (allconv12): Conv2dBlock(
191
+ (pad): ZeroPad2d(padding=(0, 0, 0, 0), value=0.0)
192
+ (activation): ELU(alpha=1.0, inplace=True)
193
+ (conv): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
194
+ )
195
+ (allconv13): Conv2dBlock(
196
+ (pad): ZeroPad2d(padding=(0, 0, 0, 0), value=0.0)
197
+ (activation): ELU(alpha=1.0, inplace=True)
198
+ (conv): Conv2d(128, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
199
+ )
200
+ (allconv14): Conv2dBlock(
201
+ (pad): ZeroPad2d(padding=(0, 0, 0, 0), value=0.0)
202
+ (activation): ELU(alpha=1.0, inplace=True)
203
+ (conv): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
204
+ )
205
+ (allconv15): Conv2dBlock(
206
+ (pad): ZeroPad2d(padding=(0, 0, 0, 0), value=0.0)
207
+ (activation): ELU(alpha=1.0, inplace=True)
208
+ (conv): Conv2d(64, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
209
+ )
210
+ (allconv16): Conv2dBlock(
211
+ (pad): ZeroPad2d(padding=(0, 0, 0, 0), value=0.0)
212
+ (activation): ELU(alpha=1.0, inplace=True)
213
+ (conv): Conv2d(32, 16, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
214
+ )
215
+ (allconv17): Conv2dBlock(
216
+ (pad): ZeroPad2d(padding=(0, 0, 0, 0), value=0.0)
217
+ (conv): Conv2d(16, 3, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
218
+ )
219
+ )
220
+ )
221
+ 2025-02-08 14:20:59,773 INFO
222
+ LocalDis(
223
+ (dis_conv_module): DisConvModule(
224
+ (conv1): Conv2dBlock(
225
+ (pad): ZeroPad2d(padding=(0, 0, 0, 0), value=0.0)
226
+ (activation): LeakyReLU(negative_slope=0.2, inplace=True)
227
+ (conv): Conv2d(3, 64, kernel_size=(5, 5), stride=(2, 2), padding=(2, 2))
228
+ )
229
+ (conv2): Conv2dBlock(
230
+ (pad): ZeroPad2d(padding=(0, 0, 0, 0), value=0.0)
231
+ (activation): LeakyReLU(negative_slope=0.2, inplace=True)
232
+ (conv): Conv2d(64, 128, kernel_size=(5, 5), stride=(2, 2), padding=(2, 2))
233
+ )
234
+ (conv3): Conv2dBlock(
235
+ (pad): ZeroPad2d(padding=(0, 0, 0, 0), value=0.0)
236
+ (activation): LeakyReLU(negative_slope=0.2, inplace=True)
237
+ (conv): Conv2d(128, 256, kernel_size=(5, 5), stride=(2, 2), padding=(2, 2))
238
+ )
239
+ (conv4): Conv2dBlock(
240
+ (pad): ZeroPad2d(padding=(0, 0, 0, 0), value=0.0)
241
+ (activation): LeakyReLU(negative_slope=0.2, inplace=True)
242
+ (conv): Conv2d(256, 256, kernel_size=(5, 5), stride=(2, 2), padding=(2, 2))
243
+ )
244
+ )
245
+ (linear): Linear(in_features=16384, out_features=1, bias=True)
246
+ )
247
+ 2025-02-08 14:20:59,774 INFO
248
+ GlobalDis(
249
+ (dis_conv_module): DisConvModule(
250
+ (conv1): Conv2dBlock(
251
+ (pad): ZeroPad2d(padding=(0, 0, 0, 0), value=0.0)
252
+ (activation): LeakyReLU(negative_slope=0.2, inplace=True)
253
+ (conv): Conv2d(3, 64, kernel_size=(5, 5), stride=(2, 2), padding=(2, 2))
254
+ )
255
+ (conv2): Conv2dBlock(
256
+ (pad): ZeroPad2d(padding=(0, 0, 0, 0), value=0.0)
257
+ (activation): LeakyReLU(negative_slope=0.2, inplace=True)
258
+ (conv): Conv2d(64, 128, kernel_size=(5, 5), stride=(2, 2), padding=(2, 2))
259
+ )
260
+ (conv3): Conv2dBlock(
261
+ (pad): ZeroPad2d(padding=(0, 0, 0, 0), value=0.0)
262
+ (activation): LeakyReLU(negative_slope=0.2, inplace=True)
263
+ (conv): Conv2d(128, 256, kernel_size=(5, 5), stride=(2, 2), padding=(2, 2))
264
+ )
265
+ (conv4): Conv2dBlock(
266
+ (pad): ZeroPad2d(padding=(0, 0, 0, 0), value=0.0)
267
+ (activation): LeakyReLU(negative_slope=0.2, inplace=True)
268
+ (conv): Conv2d(256, 256, kernel_size=(5, 5), stride=(2, 2), padding=(2, 2))
269
+ )
270
+ )
271
+ (linear): Linear(in_features=65536, out_features=1, bias=True)
272
+ )
273
+ 2025-02-08 14:20:59,918 INFO Resume from D:\generative-inpainting-pytorch-master\checkpoints\imagenet\hole_benchmark at iteration 430000
274
+ 2025-02-08 14:21:13,818 ERROR one of the variables needed for gradient computation has been modified by an inplace operation: [torch.cuda.FloatTensor [65536, 1]], which is output 0 of TBackward, is at version 3; expected version 2 instead. Hint: enable anomaly detection to find the operation that failed to compute its gradient, with torch.autograd.set_detect_anomaly(True).
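The final ERROR in this log is PyTorch's in-place-modification failure during backward: a [65536, 1] tensor (the same shape as the GlobalDis linear weight logged above) is at version 3 where version 2 was expected, which typically happens when an optimizer step updates weights that a pending backward pass still needs. As the message itself suggests, anomaly detection will point at the forward operation that broke the graph. A minimal sketch of enabling it, assuming a standard training loop; `trainer` and `train_loader` below are placeholders, not identifiers from this repository:

import torch

# Anomaly mode makes backward() raise at the forward op whose result was
# later modified in place, instead of failing with the generic message above.
torch.autograd.set_detect_anomaly(True)

# ... construct the trainer / data loader as in the training script ...
# for batch in train_loader:          # placeholder loop
#     losses = trainer(batch)         # forward pass
#     losses['d'].backward()          # traceback now names the offending op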
checkpoints/imagenet/hole_benchmark/20250427163138491612.log ADDED
@@ -0,0 +1,273 @@
1
+ 2025-04-27 16:31:38,491 INFO Arguments: Namespace(config='configs/config.yaml', seed=None)
2
+ 2025-04-27 16:31:38,492 INFO Random seed: 6593
3
+ 2025-04-27 16:31:38,493 INFO Configuration: {'dataset_name': 'imagenet', 'data_with_subfolder': True, 'train_data_path': 'traindata/train', 'val_data_path': 'traindata/val', 'resume': 'checkpoints\\imagenet\\hole_benchmark', 'batch_size': 4, 'image_shape': [256, 256, 3], 'mask_shape': [128, 128], 'mask_batch_same': True, 'max_delta_shape': [32, 32], 'margin': [0, 0], 'discounted_mask': True, 'spatial_discounting_gamma': 0.9, 'random_crop': True, 'mask_type': 'hole', 'mosaic_unit_size': 12, 'expname': 'benchmark', 'cuda': 'Ture', 'gpu_ids': [0], 'num_workers': 4, 'lr': 0.0001, 'beta1': 0.5, 'beta2': 0.9, 'n_critic': 5, 'niter': 480000, 'print_iter': 100, 'viz_iter': 1000, 'viz_max_out': 16, 'snapshot_save_iter': 5000, 'coarse_l1_alpha': 1.2, 'l1_loss_alpha': 1.2, 'ae_loss_alpha': 1.2, 'global_wgan_loss_alpha': 1.0, 'gan_loss_alpha': 0.001, 'wgan_gp_lambda': 10, 'netG': {'input_dim': 3, 'ngf': 32}, 'netD': {'input_dim': 3, 'ndf': 64}}
4
+ 2025-04-27 16:31:38,493 INFO Training on dataset: imagenet
5
+ 2025-04-27 16:31:38,953 INFO
6
+ Generator(
7
+ (coarse_generator): CoarseGenerator(
8
+ (conv1): Conv2dBlock(
9
+ (pad): ZeroPad2d((0, 0, 0, 0))
10
+ (activation): ELU(alpha=1.0, inplace=True)
11
+ (conv): Conv2d(5, 32, kernel_size=(5, 5), stride=(1, 1), padding=(2, 2))
12
+ )
13
+ (conv2_downsample): Conv2dBlock(
14
+ (pad): ZeroPad2d((0, 0, 0, 0))
15
+ (activation): ELU(alpha=1.0, inplace=True)
16
+ (conv): Conv2d(32, 64, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1))
17
+ )
18
+ (conv3): Conv2dBlock(
19
+ (pad): ZeroPad2d((0, 0, 0, 0))
20
+ (activation): ELU(alpha=1.0, inplace=True)
21
+ (conv): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
22
+ )
23
+ (conv4_downsample): Conv2dBlock(
24
+ (pad): ZeroPad2d((0, 0, 0, 0))
25
+ (activation): ELU(alpha=1.0, inplace=True)
26
+ (conv): Conv2d(64, 128, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1))
27
+ )
28
+ (conv5): Conv2dBlock(
29
+ (pad): ZeroPad2d((0, 0, 0, 0))
30
+ (activation): ELU(alpha=1.0, inplace=True)
31
+ (conv): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
32
+ )
33
+ (conv6): Conv2dBlock(
34
+ (pad): ZeroPad2d((0, 0, 0, 0))
35
+ (activation): ELU(alpha=1.0, inplace=True)
36
+ (conv): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
37
+ )
38
+ (conv7_atrous): Conv2dBlock(
39
+ (pad): ZeroPad2d((0, 0, 0, 0))
40
+ (activation): ELU(alpha=1.0, inplace=True)
41
+ (conv): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(2, 2), dilation=(2, 2))
42
+ )
43
+ (conv8_atrous): Conv2dBlock(
44
+ (pad): ZeroPad2d((0, 0, 0, 0))
45
+ (activation): ELU(alpha=1.0, inplace=True)
46
+ (conv): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(4, 4), dilation=(4, 4))
47
+ )
48
+ (conv9_atrous): Conv2dBlock(
49
+ (pad): ZeroPad2d((0, 0, 0, 0))
50
+ (activation): ELU(alpha=1.0, inplace=True)
51
+ (conv): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(8, 8), dilation=(8, 8))
52
+ )
53
+ (conv10_atrous): Conv2dBlock(
54
+ (pad): ZeroPad2d((0, 0, 0, 0))
55
+ (activation): ELU(alpha=1.0, inplace=True)
56
+ (conv): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(16, 16), dilation=(16, 16))
57
+ )
58
+ (conv11): Conv2dBlock(
59
+ (pad): ZeroPad2d((0, 0, 0, 0))
60
+ (activation): ELU(alpha=1.0, inplace=True)
61
+ (conv): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
62
+ )
63
+ (conv12): Conv2dBlock(
64
+ (pad): ZeroPad2d((0, 0, 0, 0))
65
+ (activation): ELU(alpha=1.0, inplace=True)
66
+ (conv): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
67
+ )
68
+ (conv13): Conv2dBlock(
69
+ (pad): ZeroPad2d((0, 0, 0, 0))
70
+ (activation): ELU(alpha=1.0, inplace=True)
71
+ (conv): Conv2d(128, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
72
+ )
73
+ (conv14): Conv2dBlock(
74
+ (pad): ZeroPad2d((0, 0, 0, 0))
75
+ (activation): ELU(alpha=1.0, inplace=True)
76
+ (conv): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
77
+ )
78
+ (conv15): Conv2dBlock(
79
+ (pad): ZeroPad2d((0, 0, 0, 0))
80
+ (activation): ELU(alpha=1.0, inplace=True)
81
+ (conv): Conv2d(64, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
82
+ )
83
+ (conv16): Conv2dBlock(
84
+ (pad): ZeroPad2d((0, 0, 0, 0))
85
+ (activation): ELU(alpha=1.0, inplace=True)
86
+ (conv): Conv2d(32, 16, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
87
+ )
88
+ (conv17): Conv2dBlock(
89
+ (pad): ZeroPad2d((0, 0, 0, 0))
90
+ (conv): Conv2d(16, 3, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
91
+ )
92
+ )
93
+ (fine_generator): FineGenerator(
94
+ (conv1): Conv2dBlock(
95
+ (pad): ZeroPad2d((0, 0, 0, 0))
96
+ (activation): ELU(alpha=1.0, inplace=True)
97
+ (conv): Conv2d(5, 32, kernel_size=(5, 5), stride=(1, 1), padding=(2, 2))
98
+ )
99
+ (conv2_downsample): Conv2dBlock(
100
+ (pad): ZeroPad2d((0, 0, 0, 0))
101
+ (activation): ELU(alpha=1.0, inplace=True)
102
+ (conv): Conv2d(32, 32, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1))
103
+ )
104
+ (conv3): Conv2dBlock(
105
+ (pad): ZeroPad2d((0, 0, 0, 0))
106
+ (activation): ELU(alpha=1.0, inplace=True)
107
+ (conv): Conv2d(32, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
108
+ )
109
+ (conv4_downsample): Conv2dBlock(
110
+ (pad): ZeroPad2d((0, 0, 0, 0))
111
+ (activation): ELU(alpha=1.0, inplace=True)
112
+ (conv): Conv2d(64, 64, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1))
113
+ )
114
+ (conv5): Conv2dBlock(
115
+ (pad): ZeroPad2d((0, 0, 0, 0))
116
+ (activation): ELU(alpha=1.0, inplace=True)
117
+ (conv): Conv2d(64, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
118
+ )
119
+ (conv6): Conv2dBlock(
120
+ (pad): ZeroPad2d((0, 0, 0, 0))
121
+ (activation): ELU(alpha=1.0, inplace=True)
122
+ (conv): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
123
+ )
124
+ (conv7_atrous): Conv2dBlock(
125
+ (pad): ZeroPad2d((0, 0, 0, 0))
126
+ (activation): ELU(alpha=1.0, inplace=True)
127
+ (conv): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(2, 2), dilation=(2, 2))
128
+ )
129
+ (conv8_atrous): Conv2dBlock(
130
+ (pad): ZeroPad2d((0, 0, 0, 0))
131
+ (activation): ELU(alpha=1.0, inplace=True)
132
+ (conv): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(4, 4), dilation=(4, 4))
133
+ )
134
+ (conv9_atrous): Conv2dBlock(
135
+ (pad): ZeroPad2d((0, 0, 0, 0))
136
+ (activation): ELU(alpha=1.0, inplace=True)
137
+ (conv): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(8, 8), dilation=(8, 8))
138
+ )
139
+ (conv10_atrous): Conv2dBlock(
140
+ (pad): ZeroPad2d((0, 0, 0, 0))
141
+ (activation): ELU(alpha=1.0, inplace=True)
142
+ (conv): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(16, 16), dilation=(16, 16))
143
+ )
144
+ (pmconv1): Conv2dBlock(
145
+ (pad): ZeroPad2d((0, 0, 0, 0))
146
+ (activation): ELU(alpha=1.0, inplace=True)
147
+ (conv): Conv2d(5, 32, kernel_size=(5, 5), stride=(1, 1), padding=(2, 2))
148
+ )
149
+ (pmconv2_downsample): Conv2dBlock(
150
+ (pad): ZeroPad2d((0, 0, 0, 0))
151
+ (activation): ELU(alpha=1.0, inplace=True)
152
+ (conv): Conv2d(32, 32, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1))
153
+ )
154
+ (pmconv3): Conv2dBlock(
155
+ (pad): ZeroPad2d((0, 0, 0, 0))
156
+ (activation): ELU(alpha=1.0, inplace=True)
157
+ (conv): Conv2d(32, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
158
+ )
159
+ (pmconv4_downsample): Conv2dBlock(
160
+ (pad): ZeroPad2d((0, 0, 0, 0))
161
+ (activation): ELU(alpha=1.0, inplace=True)
162
+ (conv): Conv2d(64, 128, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1))
163
+ )
164
+ (pmconv5): Conv2dBlock(
165
+ (pad): ZeroPad2d((0, 0, 0, 0))
166
+ (activation): ELU(alpha=1.0, inplace=True)
167
+ (conv): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
168
+ )
169
+ (pmconv6): Conv2dBlock(
170
+ (pad): ZeroPad2d((0, 0, 0, 0))
171
+ (activation): ReLU(inplace=True)
172
+ (conv): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
173
+ )
174
+ (contextul_attention): ContextualAttention()
175
+ (pmconv9): Conv2dBlock(
176
+ (pad): ZeroPad2d((0, 0, 0, 0))
177
+ (activation): ELU(alpha=1.0, inplace=True)
178
+ (conv): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
179
+ )
180
+ (pmconv10): Conv2dBlock(
181
+ (pad): ZeroPad2d((0, 0, 0, 0))
182
+ (activation): ELU(alpha=1.0, inplace=True)
183
+ (conv): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
184
+ )
185
+ (allconv11): Conv2dBlock(
186
+ (pad): ZeroPad2d((0, 0, 0, 0))
187
+ (activation): ELU(alpha=1.0, inplace=True)
188
+ (conv): Conv2d(256, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
189
+ )
190
+ (allconv12): Conv2dBlock(
191
+ (pad): ZeroPad2d((0, 0, 0, 0))
192
+ (activation): ELU(alpha=1.0, inplace=True)
193
+ (conv): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
194
+ )
195
+ (allconv13): Conv2dBlock(
196
+ (pad): ZeroPad2d((0, 0, 0, 0))
197
+ (activation): ELU(alpha=1.0, inplace=True)
198
+ (conv): Conv2d(128, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
199
+ )
200
+ (allconv14): Conv2dBlock(
201
+ (pad): ZeroPad2d((0, 0, 0, 0))
202
+ (activation): ELU(alpha=1.0, inplace=True)
203
+ (conv): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
204
+ )
205
+ (allconv15): Conv2dBlock(
206
+ (pad): ZeroPad2d((0, 0, 0, 0))
207
+ (activation): ELU(alpha=1.0, inplace=True)
208
+ (conv): Conv2d(64, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
209
+ )
210
+ (allconv16): Conv2dBlock(
211
+ (pad): ZeroPad2d((0, 0, 0, 0))
212
+ (activation): ELU(alpha=1.0, inplace=True)
213
+ (conv): Conv2d(32, 16, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
214
+ )
215
+ (allconv17): Conv2dBlock(
216
+ (pad): ZeroPad2d((0, 0, 0, 0))
217
+ (conv): Conv2d(16, 3, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
218
+ )
219
+ )
220
+ )
221
+ 2025-04-27 16:31:38,953 INFO
222
+ LocalDis(
223
+ (dis_conv_module): DisConvModule(
224
+ (conv1): Conv2dBlock(
225
+ (pad): ZeroPad2d((0, 0, 0, 0))
226
+ (activation): LeakyReLU(negative_slope=0.2, inplace=True)
227
+ (conv): Conv2d(3, 64, kernel_size=(5, 5), stride=(2, 2), padding=(2, 2))
228
+ )
229
+ (conv2): Conv2dBlock(
230
+ (pad): ZeroPad2d((0, 0, 0, 0))
231
+ (activation): LeakyReLU(negative_slope=0.2, inplace=True)
232
+ (conv): Conv2d(64, 128, kernel_size=(5, 5), stride=(2, 2), padding=(2, 2))
233
+ )
234
+ (conv3): Conv2dBlock(
235
+ (pad): ZeroPad2d((0, 0, 0, 0))
236
+ (activation): LeakyReLU(negative_slope=0.2, inplace=True)
237
+ (conv): Conv2d(128, 256, kernel_size=(5, 5), stride=(2, 2), padding=(2, 2))
238
+ )
239
+ (conv4): Conv2dBlock(
240
+ (pad): ZeroPad2d((0, 0, 0, 0))
241
+ (activation): LeakyReLU(negative_slope=0.2, inplace=True)
242
+ (conv): Conv2d(256, 256, kernel_size=(5, 5), stride=(2, 2), padding=(2, 2))
243
+ )
244
+ )
245
+ (linear): Linear(in_features=16384, out_features=1, bias=True)
246
+ )
247
+ 2025-04-27 16:31:38,953 INFO
248
+ GlobalDis(
249
+ (dis_conv_module): DisConvModule(
250
+ (conv1): Conv2dBlock(
251
+ (pad): ZeroPad2d((0, 0, 0, 0))
252
+ (activation): LeakyReLU(negative_slope=0.2, inplace=True)
253
+ (conv): Conv2d(3, 64, kernel_size=(5, 5), stride=(2, 2), padding=(2, 2))
254
+ )
255
+ (conv2): Conv2dBlock(
256
+ (pad): ZeroPad2d((0, 0, 0, 0))
257
+ (activation): LeakyReLU(negative_slope=0.2, inplace=True)
258
+ (conv): Conv2d(64, 128, kernel_size=(5, 5), stride=(2, 2), padding=(2, 2))
259
+ )
260
+ (conv3): Conv2dBlock(
261
+ (pad): ZeroPad2d((0, 0, 0, 0))
262
+ (activation): LeakyReLU(negative_slope=0.2, inplace=True)
263
+ (conv): Conv2d(128, 256, kernel_size=(5, 5), stride=(2, 2), padding=(2, 2))
264
+ )
265
+ (conv4): Conv2dBlock(
266
+ (pad): ZeroPad2d((0, 0, 0, 0))
267
+ (activation): LeakyReLU(negative_slope=0.2, inplace=True)
268
+ (conv): Conv2d(256, 256, kernel_size=(5, 5), stride=(2, 2), padding=(2, 2))
269
+ )
270
+ )
271
+ (linear): Linear(in_features=65536, out_features=1, bias=True)
272
+ )
273
+ 2025-04-27 16:31:38,955 ERROR 'NoneType' object has no attribute 'seek'. You can only torch.load from a file that is seekable. Please pre-load the data into a buffer like io.BytesIO and try to load from it instead.
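This run fails immediately on resume: torch.load was handed a non-seekable object (here None), which usually means no checkpoint file was found under the configured resume directory ('checkpoints\\imagenet\\hole_benchmark'), so the first thing to verify is that the expected checkpoint files actually exist there. If a genuinely non-seekable stream ever has to be loaded, the message's own workaround applies; a minimal sketch, with `ckpt_path` as a placeholder rather than a file name confirmed by this repository:

import io
import torch

ckpt_path = "checkpoints/imagenet/hole_benchmark/gen_00430000.pt"  # placeholder path

# Pre-load the raw bytes into a seekable in-memory buffer, then load from it.
with open(ckpt_path, "rb") as f:
    buffer = io.BytesIO(f.read())
state_dict = torch.load(buffer, map_location="cpu")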
checkpoints/imagenet/hole_benchmark/20250427163636067215.log ADDED
The diff for this file is too large to render. See raw diff
 
checkpoints/imagenet/hole_benchmark/config.yaml ADDED
@@ -0,0 +1,52 @@
1
+ # data parameters
2
+ dataset_name: imagenet
3
+ data_with_subfolder: True
4
+
5
+ train_data_path: traindata/train
6
+ val_data_path: traindata/val
7
+ resume: checkpoints/imagenet/hole_benchmark
8
+
9
+
10
+ batch_size: 4
11
+ image_shape: [256, 256, 3]
12
+ mask_shape: [128, 128]
13
+ mask_batch_same: True
14
+ max_delta_shape: [32, 32]
15
+ margin: [0, 0]
16
+ discounted_mask: True
17
+ spatial_discounting_gamma: 0.9
18
+ random_crop: True
19
+ mask_type: hole # hole | mosaic
20
+ mosaic_unit_size: 12
21
+
22
+ # training parameters
23
+ expname: benchmark
24
+ cuda: True
25
+ gpu_ids: [0] # set the GPU ids to use, e.g. [0] or [1, 2]
26
+ num_workers: 4
27
+ lr: 0.0001
28
+ beta1: 0.5
29
+ beta2: 0.9
30
+ n_critic: 5
31
+ niter: 480000
32
+ print_iter: 100
33
+ viz_iter: 1000
34
+ viz_max_out: 16
35
+ snapshot_save_iter: 5000
36
+
37
+ # loss weight
38
+ coarse_l1_alpha: 1.2
39
+ l1_loss_alpha: 1.2
40
+ ae_loss_alpha: 1.2
41
+ global_wgan_loss_alpha: 1.
42
+ gan_loss_alpha: 0.001
43
+ wgan_gp_lambda: 10
44
+
45
+ # network parameters
46
+ netG:
47
+ input_dim: 3
48
+ ngf: 32
49
+
50
+ netD:
51
+ input_dim: 3
52
+ ndf: 64
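For reference, this is the YAML that the training entry point echoes back as the Configuration dict in the logs above (loaded from configs/config.yaml per the logged Namespace). A minimal sketch of reading it, assuming PyYAML is available; the printed keys are purely illustrative:

import yaml

with open("configs/config.yaml") as f:
    config = yaml.safe_load(f)

# Spot-check a few of the values that drive training.
print(config["dataset_name"], config["batch_size"], config["niter"])
print(config["image_shape"], config["mask_shape"], config["mask_type"])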
checkpoints/imagenet/hole_benchmark/niter_470000.png ADDED

Git LFS Details

  • SHA256: a53c5eb85eec6a8aaf2799441047ba6a32a6a592184653f45e88157509ece791
  • Pointer size: 131 Bytes
  • Size of remote file: 322 kB
checkpoints/imagenet/hole_benchmark/niter_471000.png ADDED

Git LFS Details

  • SHA256: 766c5f5174ca6383ffc389fca60f66a20b1b1a715ce60cc05b5d8bf58cf9c1fd
  • Pointer size: 131 Bytes
  • Size of remote file: 422 kB
checkpoints/imagenet/hole_benchmark/niter_472000.png ADDED

Git LFS Details

  • SHA256: a5c18aa6da84eef84c3b39432e3acb3d1de52ea3ab4370104f3bce176bc02d3d
  • Pointer size: 131 Bytes
  • Size of remote file: 314 kB
checkpoints/imagenet/hole_benchmark/niter_473000.png ADDED

Git LFS Details

  • SHA256: 29b37aba6ff348f829884b22c275d00829d4eb3340f04cbe0f2ffba11cf6d5be
  • Pointer size: 131 Bytes
  • Size of remote file: 294 kB
checkpoints/imagenet/hole_benchmark/niter_474000.png ADDED

Git LFS Details

  • SHA256: 69730cc990b4b554a1899e64287b7e95fc7617082f3ca3047919588e1e962ce2
  • Pointer size: 131 Bytes
  • Size of remote file: 317 kB
checkpoints/imagenet/hole_benchmark/niter_475000.png ADDED

Git LFS Details

  • SHA256: 0e712e2fe8aa5cef68b7edf1ba8a3d0cf4932ca79d4b8fe1399e8cf0ee5c4b17
  • Pointer size: 131 Bytes
  • Size of remote file: 471 kB
checkpoints/imagenet/hole_benchmark/niter_476000.png ADDED

Git LFS Details

  • SHA256: d34caaec34970e922fc37e516ea0da0fe0355259834b4bd154982998f032148c
  • Pointer size: 131 Bytes
  • Size of remote file: 288 kB
checkpoints/imagenet/hole_benchmark/niter_477000.png ADDED

Git LFS Details

  • SHA256: 868890f8ef06b7dbb8acdb40ab6265c17ae2db764a1df0ab563af68f999670bd
  • Pointer size: 131 Bytes
  • Size of remote file: 266 kB
checkpoints/imagenet/hole_benchmark/niter_478000.png ADDED

Git LFS Details

  • SHA256: af6a6bea3997d3890b12b9a26eaa337cf47e63f1328fa96709cb7dc6ed5ef55c
  • Pointer size: 131 Bytes
  • Size of remote file: 292 kB
checkpoints/imagenet/hole_benchmark/niter_479000.png ADDED

Git LFS Details

  • SHA256: 484d04a68f0d892403d7a924e64fede488e95f6a96572e53ccc2ded4832c7a2c
  • Pointer size: 131 Bytes
  • Size of remote file: 279 kB
checkpoints/imagenet/hole_benchmark/niter_480000.png ADDED

Git LFS Details

  • SHA256: 45c45798c929c7c205645ba9e691dca4026059bd69ca9aa5f7d72e6f2f330f62
  • Pointer size: 131 Bytes
  • Size of remote file: 283 kB
data/content/1.jpg ADDED

Git LFS Details

  • SHA256: 64ce2f47ec360f18fb93df97070b6e8d1ab3531c128a56273b5f39e1c4b17ed5
  • Pointer size: 131 Bytes
  • Size of remote file: 197 kB
data/content/11.jpg ADDED

Git LFS Details

  • SHA256: fd5163bd80be61a09325d2bfda8628eb1d54f13a114a7c1cf3eea2f41e8326fd
  • Pointer size: 131 Bytes
  • Size of remote file: 518 kB
data/content/13.png ADDED

Git LFS Details

  • SHA256: ab7b6fe2add831fb75ec0ae296258ada73dc62321b2ad566618b0dbd7a68d07b
  • Pointer size: 131 Bytes
  • Size of remote file: 473 kB
data/content/14.jpg ADDED

Git LFS Details

  • SHA256: 7a8b99dd3995c7f91abfb79bd1228906980ef95540807b7f9246d7795efb761c
  • Pointer size: 131 Bytes
  • Size of remote file: 501 kB
data/content/16.jpg ADDED

Git LFS Details

  • SHA256: 4fbcb78cdf062f8c202eb798a5c39db8d24919c4c5d0d9783d8cea24d1f38139
  • Pointer size: 131 Bytes
  • Size of remote file: 299 kB
data/content/3.jpg ADDED

Git LFS Details

  • SHA256: 4868d2c34009dfc6e97f21014ce062053e6d9b7b5a7cc4b55813428dc73e612d
  • Pointer size: 131 Bytes
  • Size of remote file: 368 kB
data/content/5.png ADDED

Git LFS Details

  • SHA256: 2cb74ee3ee9bb1df6129fd110e59868466f1e59ae42c53d54568d3055c08e155
  • Pointer size: 131 Bytes
  • Size of remote file: 255 kB
data/content/6.png ADDED

Git LFS Details

  • SHA256: e87b44d01673e4c21d9de1c8866e42a106d459a03666d9311297d0234b205f79
  • Pointer size: 131 Bytes
  • Size of remote file: 275 kB
data/content/8.jpg ADDED

Git LFS Details

  • SHA256: 30c7b6b9b5193315343389337a51a658eee0c4e98b0117ff2e7872a85103c54f
  • Pointer size: 131 Bytes
  • Size of remote file: 496 kB
data/content/9.jpg ADDED

Git LFS Details

  • SHA256: fd73f0c3358ec3bfae164e6b99eb7817fe73d3ba51cc3768f7ec10bd964e4d6c
  • Pointer size: 131 Bytes
  • Size of remote file: 343 kB
data/content/deer.jpg ADDED

Git LFS Details

  • SHA256: d496294c2d369117911e619bf766595f72af7c2376ad058e9e28a2c6fba600af
  • Pointer size: 131 Bytes
  • Size of remote file: 209 kB
data/style/1.jpg ADDED

Git LFS Details

  • SHA256: 99a9063a87d584ef8be1876c5440150325104b3a9c43e775423e419b23a2ba0d
  • Pointer size: 131 Bytes
  • Size of remote file: 135 kB
data/style/1.png ADDED

Git LFS Details

  • SHA256: 6706e4d3212fc7a6f58c6217990ef9021fba81873883f9463ca7058307fc43fb
  • Pointer size: 131 Bytes
  • Size of remote file: 420 kB
data/style/10.jpg ADDED

Git LFS Details

  • SHA256: 0fc9cdcedd50280429fb4a971adc5c347e3b3620b2c5276bb47a8a4eec50b68f
  • Pointer size: 131 Bytes
  • Size of remote file: 359 kB
data/style/12.jpg ADDED

Git LFS Details

  • SHA256: d8c3053f9da9370d86d96c1b169fce042f684f432e594fb9e611237bb4e31af7
  • Pointer size: 131 Bytes
  • Size of remote file: 493 kB
data/style/23.png ADDED

Git LFS Details

  • SHA256: a407b3188668743d948215972fbedda5503fb2e15da9ad21d52f830b37b21367
  • Pointer size: 131 Bytes
  • Size of remote file: 349 kB
data/style/3.jpg ADDED

Git LFS Details

  • SHA256: 319bb924387949851349dc70cb5f8edb58c002e5ebfdb5854b77a4061900c1c1
  • Pointer size: 131 Bytes
  • Size of remote file: 290 kB
data/style/5.jpg ADDED

Git LFS Details

  • SHA256: ed15d4bae215beaaafdcc1caab6d202aca82e51fa8a42cf791a4704290bfd304
  • Pointer size: 131 Bytes
  • Size of remote file: 225 kB