codeShare committed on
Commit b7c4ce7 · verified · 1 Parent(s): c56de2f

Upload fusion_t2i_CLIP_interrogator.ipynb
Google Colab Jupyter Notebooks/fusion_t2i_CLIP_interrogator.ipynb CHANGED
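Note on the hunks below: in Google Colab, a trailing comment of the form # @param {...} turns the assignment on that line into a form widget, # @title gives the cell a heading in form view, and "cellView": "form" in the cell metadata makes the cell open with the widgets shown and the code collapsed. This commit adds titles and "cellView": "form" to several cells, and drops the @ from a few # @param comments, which turns those sliders and checkboxes back into plain comments so the hard-coded defaults are used. A minimal sketch of the convention, reusing names from the notebook (plain, runnable Python; only the @-prefixed comments mean anything to Colab):

    # @title ⚄ Define visualization parameters
    # With the leading '@', Colab renders the next line as a slider in form view:
    SCALE = 0.0002         # @param {type:"slider", min:0.0001, max:0.001, step:0.00001}
    # Without the '@', these are ordinary comments and the defaults below are used as-is:
    ZERO_POINT = 100       # param {type:"slider", min:0, max:300, step:1}
    image_size = 0.5       # param {type:"slider", min:0, max:1, step:0.01}
    show_encoding = False  # param {type:"boolean"}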
@@ -28,7 +28,7 @@
  {
  "cell_type": "code",
  "source": [
- "# @title ⚄ Initialize\n",
+ "# @title ⚄ 🔄 Initialize\n",
  "\n",
  "import os\n",
  "home_directory = '/content/'\n",
@@ -91,32 +91,16 @@
  "#------#\n",
  "dot_dtype = torch.float32\n",
  "dim = 768\n",
- "ref = torch.zeros(dim).to(dtype = dot_dtype)"
- ],
- "metadata": {
- "id": "TC5lMJrS1HCC",
- "cellView": "form"
- },
- "execution_count": null,
- "outputs": []
- },
- {
- "cell_type": "markdown",
- "source": [
- "The visualization has no effect on the output. It will only be used if you enable the 'Show encoding' checkbox"
- ],
- "metadata": {
- "id": "OpOoRmaP3u2H"
- }
- },
- {
- "cell_type": "code",
- "source": [
- "# @title ⚄ Define parameters for visalizing the reference in a 16x16 grid <br> (the visualization settings has no effect on output)\n",
+ "ref = torch.zeros(dim).to(dtype = dot_dtype)\n",
+ "\n",
+ "# title ⚄ Define parameters for visalizing the reference in a 16x16 grid <br> (the visualization settings has no effect on output)\n",
  "from PIL import Image, ImageDraw\n",
- "SCALE = 0.0002 # @param {type:\"slider\", min:0.0001, max:0.001, step:0.00001}\n",
- "ZERO_POINT = 100 # @param {type:\"slider\", min:0, max:300, step:1}\n",
+ "SCALE = 0.0002 # param {type:\"slider\", min:0.0001, max:0.001, step:0.00001}\n",
+ "ZERO_POINT = 100 # param {type:\"slider\", min:0, max:300, step:1}\n",
  "CELL_SIZE = 16\n",
+ "image_size = 0.5 # param {type:\"slider\", min:0, max:1, step:0.01}\n",
+ "show_encoding = False # param {type:\"boolean\"}\n",
+ "#------#\n",
  "\n",
  "BORDER_THICKNESS = 4\n",
  "\n",
@@ -154,9 +138,7 @@
  "try: ref\n",
  "except: ref = torch.zeros(dim).to(dtype = dot_dtype)\n",
  "\n",
- "image_size = 0.5 # @param {type:\"slider\", min:0, max:1, step:0.01}\n",
- "show_encoding = True # @param {type:\"boolean\"}\n",
- "#------#\n",
+ "\n",
  "if show_encoding:\n",
  " # create figure\n",
  " fig = plt.figure(figsize=(10*image_size, 10*image_size))\n",
@@ -177,12 +159,21 @@
  "print(f'Using settings SCALE = {SCALE} and ZERO_POINT = {ZERO_POINT} for visualizing the text_encoding')"
  ],
  "metadata": {
- "id": "YDu8XlehhWID",
+ "id": "TC5lMJrS1HCC",
  "cellView": "form"
  },
  "execution_count": null,
  "outputs": []
  },
+ {
+ "cell_type": "markdown",
+ "source": [
+ "The visualization has no effect on the output. It will only be used if you enable the 'Show encoding' checkbox"
+ ],
+ "metadata": {
+ "id": "OpOoRmaP3u2H"
+ }
+ },
  {
  "cell_type": "markdown",
  "source": [
@@ -196,7 +187,7 @@
  {
  "cell_type": "code",
  "source": [
- "\n",
+ "# @title ⚄ 🧩 Create an encoding\n",
  "# @markdown 📝 Write a text prompt (this will overwrite any savefile already stored)\n",
  "NEW_ENCODING = '' # @param {type:'string' ,placeholder:'write a prompt'}\n",
  "enable = True # @param {type:\"boolean\"}\n",
@@ -301,7 +292,8 @@
  "\n"
  ],
  "metadata": {
- "id": "Oxi6nOyrUTAe"
+ "id": "Oxi6nOyrUTAe",
+ "cellView": "form"
  },
  "execution_count": null,
  "outputs": []
@@ -318,7 +310,7 @@
  {
  "cell_type": "code",
  "source": [
- "\n",
+ "# @title ⚄ 📷💭 Use pre-encoded image+prompt pair\n",
  "loaded_ref = False\n",
  "try:\n",
  " ref\n",
@@ -386,7 +378,8 @@
  " #------#\n"
  ],
  "metadata": {
- "id": "BwrEs5zVB0Sb"
+ "id": "BwrEs5zVB0Sb",
+ "cellView": "form"
  },
  "execution_count": null,
  "outputs": []
@@ -403,7 +396,7 @@
  {
  "cell_type": "code",
  "source": [
- "\n",
+ "# @title ⚄ 🌐🖼️ Load an image via URL\n",
  "loaded_ref = False\n",
  "try:\n",
  " ref\n",
@@ -464,7 +457,8 @@
  " #------#"
  ],
  "metadata": {
- "id": "IqUsiQw2HU2C"
+ "id": "IqUsiQw2HU2C",
+ "cellView": "form"
  },
  "execution_count": null,
  "outputs": []
@@ -481,7 +475,7 @@
  {
  "cell_type": "code",
  "source": [
- "\n",
+ "# @title ⚄ 📂🖼️ Use an uploaded image as reference\n",
  "loaded_ref = False\n",
  "try:\n",
  " ref\n",
@@ -546,7 +540,8 @@
  " #------#"
  ],
  "metadata": {
- "id": "I_-GOwFPKkha"
+ "id": "I_-GOwFPKkha",
+ "cellView": "form"
  },
  "execution_count": null,
  "outputs": []
@@ -563,7 +558,7 @@
  {
  "cell_type": "code",
  "source": [
- "# @title ⚄ Save the reference\n",
+ "# @title ⚄ 💾 Save the reference\n",
  "\n",
  "loaded_ref = False\n",
  "try:\n",
@@ -624,7 +619,7 @@
  {
  "cell_type": "code",
  "source": [
- "# @title ⚄ CLIP Interrogator\n",
+ "# @title ⚄ 🕵️‍♂️ Run the CLIP Interrogator\n",
  "LIST_SIZE = 1000 # @param {type:'number' , placeholder:'set how large the list should be'}\n",
  "_START_AT = '0' # @param [\"0\", \"10000\", \"50000\"] {allow-input: true}\n",
  "START_AT = 0\n",
@@ -817,7 +812,8 @@
  "image\n"
  ],
  "metadata": {
- "id": "kOYZ8Ajn-DD8"
+ "id": "kOYZ8Ajn-DD8",
+ "cellView": "form"
  },
  "execution_count": null,
  "outputs": []
@@ -836,7 +832,7 @@
  {
  "cell_type": "code",
  "source": [
- "# @title ⚄ Evaluate similarities\n",
+ "# @title ⚄ 🔍 Evaluate similarities\n",
  "%cd {output_folder_sims}\n",
  "index = 0\n",
  "for filename in os.listdir(output_folder_sims):\n",
@@ -912,7 +908,8 @@
  "plt.show()"
  ],
  "metadata": {
- "id": "ln6DsZPG99ez"
+ "id": "ln6DsZPG99ez",
+ "cellView": "form"
  },
  "execution_count": null,
  "outputs": []
 
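A note on the try/except pattern that recurs in the reference-loading cells above: it is a guard for cells being run out of order. If no earlier cell has defined the reference encoding ref, the cell falls back to an all-zero vector of the CLIP embedding width. The same idiom, sketched with an explicit NameError rather than the notebook's bare except (dim and dot_dtype as defined in the Initialize cell):

    import torch

    dot_dtype = torch.float32
    dim = 768  # width of the CLIP text encoding used throughout the notebook

    try:
        ref  # reuse the reference if an earlier cell already built one
    except NameError:
        ref = torch.zeros(dim).to(dtype=dot_dtype)  # otherwise start from a zero vector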