Datasets:
Tags:
Not-For-All-Audiences
Upload prepare_genma4_lora_set.ipynb
Browse files
prepare_genma4_lora_set.ipynb
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
{"nbformat":4,"nbformat_minor":0,"metadata":{"colab":{"provenance":[],"gpuType":"T4"},"kernelspec":{"name":"python3","display_name":"Python 3"},"language_info":{"name":"python"},"accelerator":"GPU","widgets":{"application/vnd.jupyter.widget-state+json":{"3c25b0c5e7b34d9aa78f314da1fd5328":{"model_module":"@jupyter-widgets/controls","model_name":"HBoxModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HBoxModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HBoxView","box_style":"","children":["IPY_MODEL_60002b8998354ebc8bd46b9944923706","IPY_MODEL_3bb5d8c23a5c4e30afa9635712c8284b","IPY_MODEL_b59bd73a85d4495881611517a6172a23"],"layout":"IPY_MODEL_36d1ad477ef148cd9950cb34b60d2dc0"}},"60002b8998354ebc8bd46b9944923706":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_b0d1b26cd980495d8d3095d061863791","placeholder":"β","style":"IPY_MODEL_d0dd672bfb3043a7acd8f910be78f974","value":"config.json:β100%"}},"3bb5d8c23a5c4e30afa9635712c8284b":{"model_module":"@jupyter-widgets/controls","model_name":"FloatProgressModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"FloatProgressModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"ProgressView","bar_style":"success","description":"","description_tooltip":null,"layout":"IPY_MODEL_afc0f93b25274d71a23022ef21bdaf9d","max":684,"min":0,"orientation":"horizontal
","style":"IPY_MODEL_3bf8365d694d474fba3790c1f6148837","value":684}},"b59bd73a85d4495881611517a6172a23":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_e45a44f7586b45da89bf2422fe967287","placeholder":"β","style":"IPY_MODEL_b874a7adab054da186693cc6c3871125","value":"β684/684β[00:00<00:00,β72.8kB/s]"}},"36d1ad477ef148cd9950cb34b60d2dc0":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"b0d1b26cd980495d8d3095d061863791":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base
","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"d0dd672bfb3043a7acd8f910be78f974":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"afc0f93b25274d71a23022ef21bdaf9d":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_wid
th":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"3bf8365d694d474fba3790c1f6148837":{"model_module":"@jupyter-widgets/controls","model_name":"ProgressStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"ProgressStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","bar_color":null,"description_width":""}},"e45a44f7586b45da89bf2422fe967287":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"b874a7adab054da186693cc6c3871125":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2
.0","_view_name":"StyleView","description_width":""}},"31835d32eeb248d5a7a51e38d3242de5":{"model_module":"@jupyter-widgets/controls","model_name":"HBoxModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HBoxModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HBoxView","box_style":"","children":["IPY_MODEL_17ea9ef65c714aecbf5efa6ce379879b","IPY_MODEL_21897a0c643544cbae4187684a2c2547","IPY_MODEL_e3b6ecb51cfe48269053f8bae6ec25ac"],"layout":"IPY_MODEL_42f51d9d165446fda29e864f9ecc5742"}},"17ea9ef65c714aecbf5efa6ce379879b":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_c15a67dcf266407ab4f59c56afb48c12","placeholder":"β","style":"IPY_MODEL_c03426b1ded94da1956ac9468afb26af","value":"model.safetensors:β100%"}},"21897a0c643544cbae4187684a2c2547":{"model_module":"@jupyter-widgets/controls","model_name":"FloatProgressModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"FloatProgressModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"ProgressView","bar_style":"success","description":"","description_tooltip":null,"layout":"IPY_MODEL_d4230769e9d643469757dd4205eba135","max":378417260,"min":0,"orientation":"horizontal","style":"IPY_MODEL_8ff6a50c557f4d1cad1747e65bd219a4","value":378417260}},"e3b6ecb51cfe48269053f8bae6ec25ac":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_mo
dule_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_bd813d3e0fb841cd98f178d58c1fcb8a","placeholder":"β","style":"IPY_MODEL_4c25ed721d704f69912307b21ff37e3f","value":"β378M/378Mβ[00:06<00:00,β50.8MB/s]"}},"42f51d9d165446fda29e864f9ecc5742":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"c15a67dcf266407ab4f59c56afb48c12":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_fl
ow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"c03426b1ded94da1956ac9468afb26af":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"d4230769e9d643469757dd4205eba135":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":
null}},"8ff6a50c557f4d1cad1747e65bd219a4":{"model_module":"@jupyter-widgets/controls","model_name":"ProgressStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"ProgressStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","bar_color":null,"description_width":""}},"bd813d3e0fb841cd98f178d58c1fcb8a":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"4c25ed721d704f69912307b21ff37e3f":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}}}}},"cells":[{"cell_type":"code","source":["#@markdown # Cell 0: Mount Google Drive & Prepare HF_TOKEN for faster HF 
downloads\n","from google.colab import drive\n","from google.colab import userdata\n","import os\n","\n","# Mount Google Drive\n","print(\"Mounting Google Drive...\")\n","drive.mount('/content/drive', force_remount=True)\n","\n","# Create a working directory on Drive\n","WORKING_DIR = \"/content/drive/MyDrive/DinoTaggerPipeline\"\n","os.makedirs(WORKING_DIR, exist_ok=True)\n","print(f\"β
Working directory set to: {WORKING_DIR}\")\n","\n","# HF_TOKEN from Colab Secrets (recommended for private/gated models and faster downloads)\n","hf_token = userdata.get('HF_TOKEN')\n","if hf_token:\n"," os.environ[\"HF_TOKEN\"] = hf_token\n"," print(\"β
HF_TOKEN loaded from Colab Secrets and set as environment variable.\")\n"," print(\" β This enables authenticated + faster/resumable Hugging Face downloads.\")\n","else:\n"," print(\"β οΈ HF_TOKEN not found in Colab Secrets.\")\n"," print(\" β Add it via the key icon (left sidebar) β Secrets β Name: HF_TOKEN\")\n"," print(\" β Some downloads may be slower or fail if the repo requires login.\")\n","\n","print(\"\\nβ
Cell 0 complete. Ready for model downloads.\")"],"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"qZ9aJ-2OF1Cp","executionInfo":{"status":"ok","timestamp":1775407346807,"user_tz":-120,"elapsed":20307,"user":{"displayName":"fukU Google","userId":"02763165356193834046"}},"outputId":"f8a84278-1750-4af0-c0d3-dc87a6cf787f"},"execution_count":1,"outputs":[{"output_type":"stream","name":"stdout","text":["Mounting Google Drive...\n","Mounted at /content/drive\n","β
Working directory set to: /content/drive/MyDrive/DinoTaggerPipeline\n","β
HF_TOKEN loaded from Colab Secrets and set as environment variable.\n"," β This enables authenticated + faster/resumable Hugging Face downloads.\n","\n","β
Cell 0 complete. Ready for model downloads.\n"]}]},{"cell_type":"code","source":["#@markdown # Cell 1: Download All Models to Disk (CPU-only, No VRAM Load)\n","import torch\n","from huggingface_hub import hf_hub_download\n","from pathlib import Path\n","import os\n","\n","print(\"Downloading models to disk (CPU-only, resumable)...\")\n","\n","# 1. DINO Tagger (lodestones/tagger-experiment)\n","print(\"\\n1. Downloading DINO Tagger files...\")\n","hf_hub_download(repo_id=\"lodestones/tagger-experiment\", filename=\"tagger_proto.safetensors\", local_dir=WORKING_DIR, local_dir_use_symlinks=False)\n","hf_hub_download(repo_id=\"lodestones/tagger-experiment\", filename=\"tagger_vocab_with_categories.json\", local_dir=WORKING_DIR, local_dir_use_symlinks=False)\n","hf_hub_download(repo_id=\"lodestones/tagger-experiment\", filename=\"inference_tagger_standalone.py\", local_dir=WORKING_DIR, local_dir_use_symlinks=False)\n","\n","# 2. WD-VIT Tagger (selected_tags.csv)\n","print(\"\\n2. Downloading WD-VIT Tagger assets...\")\n","tags_path = Path(WORKING_DIR) / \"selected_tags.csv\"\n","if not tags_path.exists():\n"," import requests\n"," from io import StringIO\n"," import pandas as pd\n"," response = requests.get(\"https://huggingface.co/SmilingWolf/wd-vit-tagger-v3/resolve/main/selected_tags.csv\")\n"," tags_df = pd.read_csv(StringIO(response.text))\n"," tags_df.to_csv(tags_path, index=False)\n"," print(f\" β Saved {len(tags_df)} tags to selected_tags.csv\")\n","\n","print(f\"\\nβ
All lightweight models downloaded/cached to: {WORKING_DIR}\")"],"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"Mo3kn8KrF2nz","executionInfo":{"status":"ok","timestamp":1775400155772,"user_tz":-120,"elapsed":7536,"user":{"displayName":"No Name","userId":"10578412414437288386"}},"outputId":"9a4ad26d-06f5-4ffc-ebea-7142a54fbdb3"},"execution_count":null,"outputs":[{"output_type":"stream","name":"stdout","text":["Downloading models to disk (CPU-only, resumable)...\n","\n","1. Downloading DINO Tagger files...\n"]},{"output_type":"stream","name":"stderr","text":["/usr/local/lib/python3.12/dist-packages/huggingface_hub/utils/_validators.py:206: UserWarning: The `local_dir_use_symlinks` argument is deprecated and ignored in `hf_hub_download`. Downloading to a local directory does not use symlinks anymore.\n"," warnings.warn(\n"]},{"output_type":"stream","name":"stdout","text":["\n","2. Downloading WD-VIT Tagger assets...\n","\n","β
All lightweight models downloaded/cached to: /content/drive/MyDrive/DinoTaggerPipeline\n"]}]},{"cell_type":"code","source":["#@markdown # Cell 2: Unpack gemma4_training_set_preliminary.zip (Image + Text Pairs)\n","from pathlib import Path\n","import zipfile\n","import json\n","\n","# === Settings ===\n","zip_file_path = \"/content/drive/MyDrive/gemma4_training_set_preliminary.zip\" #@param {type:\"string\"}\n","\n","extract_dir = Path(\"/content/preliminary_pairs\")\n","extract_dir.mkdir(exist_ok=True)\n","\n","if not Path(zip_file_path).exists():\n"," print(f\"β ZIP file not found at: {zip_file_path}\")\n","else:\n"," print(f\"Extracting image-text pairs from: {zip_file_path}\")\n"," with zipfile.ZipFile(zip_file_path, 'r') as zip_ref:\n"," members = [m for m in zip_ref.namelist() if not m.startswith('__MACOSX/') and not m.endswith('.DS_Store')]\n"," for member in members:\n"," zip_ref.extract(member, extract_dir)\n"," print(\" β Ignored __MACOSX/ and .DS_Store files.\")\n","\n"," # Collect all images\n"," image_files = []\n"," for ext in ['.png', '.jpg', '.jpeg', '.webp', '.avif', '.bmp']:\n"," image_files.extend(list(extract_dir.rglob(f\"*{ext}\")))\n"," image_files.extend(list(extract_dir.rglob(f\"*{ext.upper()}\")))\n"," image_files = sorted(set(str(p) for p in image_files))\n","\n"," print(f\"β
Extracted {len(image_files)} image(s).\")\n","\n"," # Build mapping for original captions (preserves exact order)\n"," caption_map = {}\n"," image_stems_in_order = []\n"," for p_str in image_files:\n"," img_path = Path(p_str)\n"," stem = img_path.stem\n"," image_stems_in_order.append(stem)\n"," txt_path = img_path.with_suffix('.txt')\n"," if txt_path.exists():\n"," with open(txt_path, \"r\", encoding=\"utf-8\") as f:\n"," caption_map[stem] = f.read().strip()\n"," else:\n"," caption_map[stem] = \"\"\n","\n"," # Save for later cells\n"," with open(\"/content/image_stems_order.json\", \"w\", encoding=\"utf-8\") as f:\n"," json.dump(image_stems_in_order, f)\n"," with open(\"/content/caption_map.json\", \"w\", encoding=\"utf-8\") as f:\n"," json.dump(caption_map, f)\n","\n"," print(f\"β
Loaded {len([v for v in caption_map.values() if v])} original captions.\")\n"," print(\" Image-text pairs are ready in /content/preliminary_pairs/\")"],"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"tyrLWjjXGHcy","executionInfo":{"status":"ok","timestamp":1775400175885,"user_tz":-120,"elapsed":3891,"user":{"displayName":"No Name","userId":"10578412414437288386"}},"outputId":"26ac8a87-f4d2-4366-91ef-8054cce76f3f"},"execution_count":null,"outputs":[{"output_type":"stream","name":"stdout","text":["Extracting image-text pairs from: /content/drive/MyDrive/gemma4_training_set_preliminary.zip\n"," β Ignored __MACOSX/ and .DS_Store files.\n","β
Extracted 270 image(s).\n","β
Loaded 270 original captions.\n"," Image-text pairs are ready in /content/preliminary_pairs/\n"]}]},{"cell_type":"code","source":["#@markdown # Cell 3: Clear VRAM + Load WD-VIT Tagger v3\n","import torch\n","import gc\n","import timm\n","import torchvision.transforms as transforms\n","import pandas as pd\n","from pathlib import Path\n","\n","# Clear VRAM\n","print(\"π§Ή Clearing VRAM...\")\n","if 'model' in globals(): del model\n","if 'tagger' in globals(): del tagger\n","torch.cuda.empty_cache()\n","gc.collect()\n","print(\" β VRAM cleared.\")\n","\n","# Load WD-VIT Tagger\n","print(\"\\nπ₯ Loading WD-VIT Tagger v3...\")\n","tags_path = Path(WORKING_DIR) / \"selected_tags.csv\"\n","tags_df = pd.read_csv(tags_path)\n","tags_list = tags_df['name'].tolist()\n","print(f\" β Loaded {len(tags_list)} tags.\")\n","\n","model = timm.create_model(\"hf_hub:SmilingWolf/wd-vit-tagger-v3\", pretrained=True)\n","model.eval()\n","device = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")\n","model = model.to(device)\n","\n","preprocess = transforms.Compose([\n"," transforms.Resize((448, 448)),\n"," transforms.ToTensor(),\n"," transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]),\n","])\n","\n","print(f\"β
WD-VIT Tagger loaded on {device}\")"],"metadata":{"colab":{"base_uri":"https://localhost:8080/","height":209,"referenced_widgets":["3c25b0c5e7b34d9aa78f314da1fd5328","60002b8998354ebc8bd46b9944923706","3bb5d8c23a5c4e30afa9635712c8284b","b59bd73a85d4495881611517a6172a23","36d1ad477ef148cd9950cb34b60d2dc0","b0d1b26cd980495d8d3095d061863791","d0dd672bfb3043a7acd8f910be78f974","afc0f93b25274d71a23022ef21bdaf9d","3bf8365d694d474fba3790c1f6148837","e45a44f7586b45da89bf2422fe967287","b874a7adab054da186693cc6c3871125","31835d32eeb248d5a7a51e38d3242de5","17ea9ef65c714aecbf5efa6ce379879b","21897a0c643544cbae4187684a2c2547","e3b6ecb51cfe48269053f8bae6ec25ac","42f51d9d165446fda29e864f9ecc5742","c15a67dcf266407ab4f59c56afb48c12","c03426b1ded94da1956ac9468afb26af","d4230769e9d643469757dd4205eba135","8ff6a50c557f4d1cad1747e65bd219a4","bd813d3e0fb841cd98f178d58c1fcb8a","4c25ed721d704f69912307b21ff37e3f"]},"id":"-ZLGZUMZGMmW","executionInfo":{"status":"ok","timestamp":1775400218526,"user_tz":-120,"elapsed":18957,"user":{"displayName":"No Name","userId":"10578412414437288386"}},"outputId":"f9f4a4bb-6604-4dd4-adcd-a301bf33c654"},"execution_count":null,"outputs":[{"output_type":"stream","name":"stdout","text":["π§Ή Clearing VRAM...\n"," β VRAM cleared.\n","\n","π₯ Loading WD-VIT Tagger v3...\n"," β Loaded 10861 tags.\n"]},{"output_type":"display_data","data":{"text/plain":["config.json: 0%| | 0.00/684 [00:00<?, ?B/s]"],"application/vnd.jupyter.widget-view+json":{"version_major":2,"version_minor":0,"model_id":"3c25b0c5e7b34d9aa78f314da1fd5328"}},"metadata":{}},{"output_type":"display_data","data":{"text/plain":["model.safetensors: 0%| | 0.00/378M [00:00<?, ?B/s]"],"application/vnd.jupyter.widget-view+json":{"version_major":2,"version_minor":0,"model_id":"31835d32eeb248d5a7a51e38d3242de5"}},"metadata":{}},{"output_type":"stream","name":"stdout","text":["β
WD-VIT Tagger loaded on cuda\n"]}]},{"cell_type":"code","source":["#@markdown # Cell 4: Tag Images with WD-VIT Tagger\n","from PIL import Image\n","from pathlib import Path\n","\n","threshold = 0.7 #@param {type:\"slider\", min:0.1, max:0.95, step:0.01}\n","add_commas = True #@param {type:\"boolean\"}\n","\n","input_dir = Path(\"/content/preliminary_pairs\")\n","output_dir = Path(\"/content/wd_tags\")\n","output_dir.mkdir(exist_ok=True)\n","\n","image_files = sorted(list(input_dir.glob(\"*.*\")))\n","image_files = [f for f in image_files if f.suffix.lower() in {'.png','.jpg','.jpeg','.webp','.bmp','.avif'}]\n","\n","print(f\"Starting WD tagging for {len(image_files)} images...\")\n","\n","for i, img_path in enumerate(image_files, start=1):\n"," img_name = img_path.name\n"," print(f\"[{i}/{len(image_files)}] Processing: {img_name}\")\n","\n"," image = Image.open(img_path).convert(\"RGB\")\n"," input_tensor = preprocess(image).unsqueeze(0).to(device)\n","\n"," with torch.no_grad():\n"," logits = model(input_tensor)\n"," probs = torch.sigmoid(logits).cpu().numpy()[0]\n","\n"," wd_tags = [tags_list[j] for j, prob in enumerate(probs) if prob > threshold]\n"," tag_text = \" , \".join(wd_tags) if add_commas else \" \".join(wd_tags)\n","\n"," base_name = f\"{i:04d}\"\n","\n"," image.save(output_dir / f\"{base_name}.jpg\", \"JPEG\", quality=95)\n"," with open(output_dir / f\"{base_name}.txt\", \"w\", encoding=\"utf-8\") as f:\n"," f.write(tag_text)\n","\n"," print(f\" β Saved {base_name}.jpg + {base_name}.txt ({len(wd_tags)} WD tags)\")\n","\n","print(\"\\nβ
WD tagging complete. Tags saved in /content/wd_tags/\")"],"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"De07WpfnGNod","executionInfo":{"status":"ok","timestamp":1775400247581,"user_tz":-120,"elapsed":25514,"user":{"displayName":"No Name","userId":"10578412414437288386"}},"outputId":"ca25213d-6a77-4610-e65e-bf5468126262"},"execution_count":null,"outputs":[{"output_type":"stream","name":"stdout","text":["Starting WD tagging for 270 images...\n","[1/270] Processing: 1.jpeg\n"," β Saved 0001.jpg + 0001.txt (7 WD tags)\n","[2/270] Processing: 10.jpeg\n"," β Saved 0002.jpg + 0002.txt (2 WD tags)\n","[3/270] Processing: 100.jpeg\n"," β Saved 0003.jpg + 0003.txt (6 WD tags)\n","[4/270] Processing: 101.jpeg\n"," β Saved 0004.jpg + 0004.txt (5 WD tags)\n","[5/270] Processing: 102.jpeg\n"," β Saved 0005.jpg + 0005.txt (12 WD tags)\n","[6/270] Processing: 103.jpeg\n"," β Saved 0006.jpg + 0006.txt (14 WD tags)\n","[7/270] Processing: 104.jpeg\n"," β Saved 0007.jpg + 0007.txt (17 WD tags)\n","[8/270] Processing: 105.jpeg\n"," β Saved 0008.jpg + 0008.txt (7 WD tags)\n","[9/270] Processing: 106.jpeg\n"," β Saved 0009.jpg + 0009.txt (15 WD tags)\n","[10/270] Processing: 107.jpeg\n"," β Saved 0010.jpg + 0010.txt (8 WD tags)\n","[11/270] Processing: 108.jpeg\n"," β Saved 0011.jpg + 0011.txt (22 WD tags)\n","[12/270] Processing: 109.jpeg\n"," β Saved 0012.jpg + 0012.txt (3 WD tags)\n","[13/270] Processing: 11.jpeg\n"," β Saved 0013.jpg + 0013.txt (12 WD tags)\n","[14/270] Processing: 110.jpeg\n"," β Saved 0014.jpg + 0014.txt (4 WD tags)\n","[15/270] Processing: 111.jpeg\n"," β Saved 0015.jpg + 0015.txt (15 WD tags)\n","[16/270] Processing: 112.jpeg\n"," β Saved 0016.jpg + 0016.txt (3 WD tags)\n","[17/270] Processing: 113.jpeg\n"," β Saved 0017.jpg + 0017.txt (15 WD tags)\n","[18/270] Processing: 114.jpeg\n"," β Saved 0018.jpg + 0018.txt (24 WD tags)\n","[19/270] Processing: 115.jpeg\n"," β Saved 0019.jpg + 0019.txt (9 WD tags)\n","[20/270] Processing: 116.jpeg\n"," 
β Saved 0020.jpg + 0020.txt (3 WD tags)\n","[21/270] Processing: 117.jpeg\n"," β Saved 0021.jpg + 0021.txt (10 WD tags)\n","[22/270] Processing: 118.jpeg\n"," β Saved 0022.jpg + 0022.txt (9 WD tags)\n","[23/270] Processing: 119.jpeg\n"," β Saved 0023.jpg + 0023.txt (13 WD tags)\n","[24/270] Processing: 12.jpeg\n"," β Saved 0024.jpg + 0024.txt (12 WD tags)\n","[25/270] Processing: 120.jpeg\n"," β Saved 0025.jpg + 0025.txt (5 WD tags)\n","[26/270] Processing: 121.jpeg\n"," β Saved 0026.jpg + 0026.txt (6 WD tags)\n","[27/270] Processing: 122.jpeg\n"," β Saved 0027.jpg + 0027.txt (16 WD tags)\n","[28/270] Processing: 123.jpeg\n"," β Saved 0028.jpg + 0028.txt (5 WD tags)\n","[29/270] Processing: 124.jpeg\n"," β Saved 0029.jpg + 0029.txt (7 WD tags)\n","[30/270] Processing: 125.jpeg\n"," β Saved 0030.jpg + 0030.txt (17 WD tags)\n","[31/270] Processing: 126.jpeg\n"," β Saved 0031.jpg + 0031.txt (10 WD tags)\n","[32/270] Processing: 127.jpeg\n"," β Saved 0032.jpg + 0032.txt (11 WD tags)\n","[33/270] Processing: 128.jpeg\n"," β Saved 0033.jpg + 0033.txt (2 WD tags)\n","[34/270] Processing: 129.jpeg\n"," β Saved 0034.jpg + 0034.txt (13 WD tags)\n","[35/270] Processing: 13.jpeg\n"," β Saved 0035.jpg + 0035.txt (3 WD tags)\n","[36/270] Processing: 130.jpeg\n"," β Saved 0036.jpg + 0036.txt (10 WD tags)\n","[37/270] Processing: 131.jpeg\n"," β Saved 0037.jpg + 0037.txt (14 WD tags)\n","[38/270] Processing: 132.jpeg\n"," β Saved 0038.jpg + 0038.txt (13 WD tags)\n","[39/270] Processing: 133.jpeg\n"," β Saved 0039.jpg + 0039.txt (6 WD tags)\n","[40/270] Processing: 134.jpeg\n"," β Saved 0040.jpg + 0040.txt (9 WD tags)\n","[41/270] Processing: 135.jpeg\n"," β Saved 0041.jpg + 0041.txt (13 WD tags)\n","[42/270] Processing: 136.jpeg\n"," β Saved 0042.jpg + 0042.txt (1 WD tags)\n","[43/270] Processing: 137.jpeg\n"," β Saved 0043.jpg + 0043.txt (13 WD tags)\n","[44/270] Processing: 138.jpeg\n"," β Saved 0044.jpg + 0044.txt (9 WD tags)\n","[45/270] Processing: 139.jpeg\n"," β Saved 
0045.jpg + 0045.txt (26 WD tags)\n","[46/270] Processing: 14.jpeg\n"," β Saved 0046.jpg + 0046.txt (1 WD tags)\n","[47/270] Processing: 140.jpeg\n"," β Saved 0047.jpg + 0047.txt (2 WD tags)\n","[48/270] Processing: 141.jpeg\n"," β Saved 0048.jpg + 0048.txt (5 WD tags)\n","[49/270] Processing: 142.jpeg\n"," β Saved 0049.jpg + 0049.txt (7 WD tags)\n","[50/270] Processing: 143.jpeg\n"," β Saved 0050.jpg + 0050.txt (6 WD tags)\n","[51/270] Processing: 144.jpeg\n"," β Saved 0051.jpg + 0051.txt (17 WD tags)\n","[52/270] Processing: 145.jpeg\n"," β Saved 0052.jpg + 0052.txt (8 WD tags)\n","[53/270] Processing: 146.jpeg\n"," β Saved 0053.jpg + 0053.txt (6 WD tags)\n","[54/270] Processing: 147.jpeg\n"," β Saved 0054.jpg + 0054.txt (10 WD tags)\n","[55/270] Processing: 148.jpeg\n"," β Saved 0055.jpg + 0055.txt (6 WD tags)\n","[56/270] Processing: 149.jpeg\n"," β Saved 0056.jpg + 0056.txt (1 WD tags)\n","[57/270] Processing: 15.jpeg\n"," β Saved 0057.jpg + 0057.txt (8 WD tags)\n","[58/270] Processing: 150.jpeg\n"," β Saved 0058.jpg + 0058.txt (8 WD tags)\n","[59/270] Processing: 151.jpeg\n"," β Saved 0059.jpg + 0059.txt (2 WD tags)\n","[60/270] Processing: 152.jpeg\n"," β Saved 0060.jpg + 0060.txt (6 WD tags)\n","[61/270] Processing: 153.jpeg\n"," β Saved 0061.jpg + 0061.txt (5 WD tags)\n","[62/270] Processing: 154.jpeg\n"," β Saved 0062.jpg + 0062.txt (5 WD tags)\n","[63/270] Processing: 155.jpeg\n"," β Saved 0063.jpg + 0063.txt (4 WD tags)\n","[64/270] Processing: 156.jpeg\n"," β Saved 0064.jpg + 0064.txt (2 WD tags)\n","[65/270] Processing: 157.jpeg\n"," β Saved 0065.jpg + 0065.txt (3 WD tags)\n","[66/270] Processing: 158.jpeg\n"," β Saved 0066.jpg + 0066.txt (2 WD tags)\n","[67/270] Processing: 159.jpeg\n"," β Saved 0067.jpg + 0067.txt (13 WD tags)\n","[68/270] Processing: 16.jpeg\n"," β Saved 0068.jpg + 0068.txt (4 WD tags)\n","[69/270] Processing: 160.jpeg\n"," β Saved 0069.jpg + 0069.txt (2 WD tags)\n","[70/270] Processing: 161.jpeg\n"," β Saved 0070.jpg + 0070.txt (4 
WD tags)\n","[71/270] Processing: 162.jpeg\n"," β Saved 0071.jpg + 0071.txt (3 WD tags)\n","[72/270] Processing: 163.jpeg\n"," β Saved 0072.jpg + 0072.txt (16 WD tags)\n","[73/270] Processing: 164.jpeg\n"," β Saved 0073.jpg + 0073.txt (6 WD tags)\n","[74/270] Processing: 165.jpeg\n"," β Saved 0074.jpg + 0074.txt (12 WD tags)\n","[75/270] Processing: 166.jpeg\n"," β Saved 0075.jpg + 0075.txt (4 WD tags)\n","[76/270] Processing: 167.jpeg\n"," β Saved 0076.jpg + 0076.txt (8 WD tags)\n","[77/270] Processing: 168.jpeg\n"," β Saved 0077.jpg + 0077.txt (10 WD tags)\n","[78/270] Processing: 169.jpeg\n"," β Saved 0078.jpg + 0078.txt (13 WD tags)\n","[79/270] Processing: 17.jpeg\n"," β Saved 0079.jpg + 0079.txt (13 WD tags)\n","[80/270] Processing: 170.jpeg\n"," β Saved 0080.jpg + 0080.txt (4 WD tags)\n","[81/270] Processing: 171.jpeg\n"," β Saved 0081.jpg + 0081.txt (4 WD tags)\n","[82/270] Processing: 172.jpeg\n"," β Saved 0082.jpg + 0082.txt (2 WD tags)\n","[83/270] Processing: 173.jpeg\n"," β Saved 0083.jpg + 0083.txt (3 WD tags)\n","[84/270] Processing: 174.jpeg\n"," β Saved 0084.jpg + 0084.txt (11 WD tags)\n","[85/270] Processing: 175.jpeg\n"," β Saved 0085.jpg + 0085.txt (1 WD tags)\n","[86/270] Processing: 176.jpeg\n"," β Saved 0086.jpg + 0086.txt (7 WD tags)\n","[87/270] Processing: 177.jpeg\n"," β Saved 0087.jpg + 0087.txt (10 WD tags)\n","[88/270] Processing: 178.jpeg\n"," β Saved 0088.jpg + 0088.txt (18 WD tags)\n","[89/270] Processing: 179.jpeg\n"," β Saved 0089.jpg + 0089.txt (21 WD tags)\n","[90/270] Processing: 18.jpeg\n"," β Saved 0090.jpg + 0090.txt (6 WD tags)\n","[91/270] Processing: 180.jpeg\n"," β Saved 0091.jpg + 0091.txt (3 WD tags)\n","[92/270] Processing: 181.jpeg\n"," β Saved 0092.jpg + 0092.txt (11 WD tags)\n","[93/270] Processing: 182.jpeg\n"," β Saved 0093.jpg + 0093.txt (6 WD tags)\n","[94/270] Processing: 183.jpeg\n"," β Saved 0094.jpg + 0094.txt (17 WD tags)\n","[95/270] Processing: 184.jpeg\n"," β Saved 0095.jpg + 0095.txt (9 WD 
tags)\n","[96/270] Processing: 185.jpeg\n"," β Saved 0096.jpg + 0096.txt (6 WD tags)\n","[97/270] Processing: 186.jpeg\n"," β Saved 0097.jpg + 0097.txt (8 WD tags)\n","[98/270] Processing: 187.jpeg\n"," β Saved 0098.jpg + 0098.txt (3 WD tags)\n","[99/270] Processing: 188.jpeg\n"," β Saved 0099.jpg + 0099.txt (4 WD tags)\n","[100/270] Processing: 189.jpeg\n"," β Saved 0100.jpg + 0100.txt (6 WD tags)\n","[101/270] Processing: 19.jpeg\n"," β Saved 0101.jpg + 0101.txt (4 WD tags)\n","[102/270] Processing: 190.jpeg\n"," β Saved 0102.jpg + 0102.txt (6 WD tags)\n","[103/270] Processing: 191.jpeg\n"," β Saved 0103.jpg + 0103.txt (7 WD tags)\n","[104/270] Processing: 192.jpeg\n"," β Saved 0104.jpg + 0104.txt (12 WD tags)\n","[105/270] Processing: 193.jpeg\n"," β Saved 0105.jpg + 0105.txt (10 WD tags)\n","[106/270] Processing: 194.jpeg\n"," β Saved 0106.jpg + 0106.txt (5 WD tags)\n","[107/270] Processing: 195.jpeg\n"," β Saved 0107.jpg + 0107.txt (11 WD tags)\n","[108/270] Processing: 196.jpeg\n"," β Saved 0108.jpg + 0108.txt (13 WD tags)\n","[109/270] Processing: 197.jpeg\n"," β Saved 0109.jpg + 0109.txt (11 WD tags)\n","[110/270] Processing: 198.jpeg\n"," β Saved 0110.jpg + 0110.txt (10 WD tags)\n","[111/270] Processing: 199.jpeg\n"," β Saved 0111.jpg + 0111.txt (14 WD tags)\n","[112/270] Processing: 2.jpeg\n"," β Saved 0112.jpg + 0112.txt (6 WD tags)\n","[113/270] Processing: 20.jpeg\n"," β Saved 0113.jpg + 0113.txt (4 WD tags)\n","[114/270] Processing: 200.jpeg\n"," β Saved 0114.jpg + 0114.txt (19 WD tags)\n","[115/270] Processing: 201.jpeg\n"," β Saved 0115.jpg + 0115.txt (12 WD tags)\n","[116/270] Processing: 202.jpeg\n"," β Saved 0116.jpg + 0116.txt (13 WD tags)\n","[117/270] Processing: 203.jpeg\n"," β Saved 0117.jpg + 0117.txt (4 WD tags)\n","[118/270] Processing: 204.jpeg\n"," β Saved 0118.jpg + 0118.txt (11 WD tags)\n","[119/270] Processing: 205.jpeg\n"," β Saved 0119.jpg + 0119.txt (12 WD tags)\n","[120/270] Processing: 206.jpeg\n"," β Saved 0120.jpg + 0120.txt 
(13 WD tags)\n","[121/270] Processing: 207.jpeg\n"," β Saved 0121.jpg + 0121.txt (19 WD tags)\n","[122/270] Processing: 208.jpeg\n"," β Saved 0122.jpg + 0122.txt (12 WD tags)\n","[123/270] Processing: 209.jpeg\n"," β Saved 0123.jpg + 0123.txt (12 WD tags)\n","[124/270] Processing: 21.jpeg\n"," β Saved 0124.jpg + 0124.txt (2 WD tags)\n","[125/270] Processing: 210.jpeg\n"," β Saved 0125.jpg + 0125.txt (10 WD tags)\n","[126/270] Processing: 211.jpeg\n"," β Saved 0126.jpg + 0126.txt (10 WD tags)\n","[127/270] Processing: 212.jpeg\n"," β Saved 0127.jpg + 0127.txt (10 WD tags)\n","[128/270] Processing: 213.jpeg\n"," β Saved 0128.jpg + 0128.txt (12 WD tags)\n","[129/270] Processing: 214.jpeg\n"," β Saved 0129.jpg + 0129.txt (12 WD tags)\n","[130/270] Processing: 215.jpeg\n"," β Saved 0130.jpg + 0130.txt (4 WD tags)\n","[131/270] Processing: 216.jpeg\n"," β Saved 0131.jpg + 0131.txt (4 WD tags)\n","[132/270] Processing: 217.jpeg\n"," β Saved 0132.jpg + 0132.txt (1 WD tags)\n","[133/270] Processing: 218.jpeg\n"," β Saved 0133.jpg + 0133.txt (5 WD tags)\n","[134/270] Processing: 219.jpeg\n"," β Saved 0134.jpg + 0134.txt (2 WD tags)\n","[135/270] Processing: 22.jpeg\n"," β Saved 0135.jpg + 0135.txt (12 WD tags)\n","[136/270] Processing: 220.jpeg\n"," β Saved 0136.jpg + 0136.txt (0 WD tags)\n","[137/270] Processing: 221.jpeg\n"," β Saved 0137.jpg + 0137.txt (4 WD tags)\n","[138/270] Processing: 222.jpeg\n"," β Saved 0138.jpg + 0138.txt (6 WD tags)\n","[139/270] Processing: 223.jpeg\n"," β Saved 0139.jpg + 0139.txt (5 WD tags)\n","[140/270] Processing: 224.jpeg\n"," β Saved 0140.jpg + 0140.txt (3 WD tags)\n","[141/270] Processing: 225.jpeg\n"," β Saved 0141.jpg + 0141.txt (3 WD tags)\n","[142/270] Processing: 226.jpeg\n"," β Saved 0142.jpg + 0142.txt (22 WD tags)\n","[143/270] Processing: 227.jpeg\n"," β Saved 0143.jpg + 0143.txt (3 WD tags)\n","[144/270] Processing: 228.jpeg\n"," β Saved 0144.jpg + 0144.txt (2 WD tags)\n","[145/270] Processing: 229.jpeg\n"," β Saved 0145.jpg + 
0145.txt (4 WD tags)\n","[146/270] Processing: 23.jpeg\n"," β Saved 0146.jpg + 0146.txt (16 WD tags)\n","[147/270] Processing: 230.jpeg\n"," β Saved 0147.jpg + 0147.txt (4 WD tags)\n","[148/270] Processing: 231.jpeg\n"," β Saved 0148.jpg + 0148.txt (6 WD tags)\n","[149/270] Processing: 232.jpeg\n"," β Saved 0149.jpg + 0149.txt (3 WD tags)\n","[150/270] Processing: 233.jpeg\n"," β Saved 0150.jpg + 0150.txt (2 WD tags)\n","[151/270] Processing: 234.jpeg\n"," β Saved 0151.jpg + 0151.txt (14 WD tags)\n","[152/270] Processing: 235.jpeg\n"," β Saved 0152.jpg + 0152.txt (18 WD tags)\n","[153/270] Processing: 236.jpeg\n"," β Saved 0153.jpg + 0153.txt (3 WD tags)\n","[154/270] Processing: 237.jpeg\n"," β Saved 0154.jpg + 0154.txt (2 WD tags)\n","[155/270] Processing: 238.jpeg\n"," β Saved 0155.jpg + 0155.txt (3 WD tags)\n","[156/270] Processing: 239.jpeg\n"," β Saved 0156.jpg + 0156.txt (3 WD tags)\n","[157/270] Processing: 24.jpeg\n"," β Saved 0157.jpg + 0157.txt (2 WD tags)\n","[158/270] Processing: 240.jpeg\n"," β Saved 0158.jpg + 0158.txt (0 WD tags)\n","[159/270] Processing: 241.jpeg\n"," β Saved 0159.jpg + 0159.txt (2 WD tags)\n","[160/270] Processing: 242.jpeg\n"," β Saved 0160.jpg + 0160.txt (6 WD tags)\n","[161/270] Processing: 243.jpeg\n"," β Saved 0161.jpg + 0161.txt (3 WD tags)\n","[162/270] Processing: 244.jpeg\n"," β Saved 0162.jpg + 0162.txt (1 WD tags)\n","[163/270] Processing: 245.jpeg\n"," β Saved 0163.jpg + 0163.txt (3 WD tags)\n","[164/270] Processing: 246.jpeg\n"," β Saved 0164.jpg + 0164.txt (3 WD tags)\n","[165/270] Processing: 247.jpeg\n"," β Saved 0165.jpg + 0165.txt (3 WD tags)\n","[166/270] Processing: 248.jpeg\n"," β Saved 0166.jpg + 0166.txt (12 WD tags)\n","[167/270] Processing: 249.jpeg\n"," β Saved 0167.jpg + 0167.txt (1 WD tags)\n","[168/270] Processing: 25.jpeg\n"," β Saved 0168.jpg + 0168.txt (4 WD tags)\n","[169/270] Processing: 250.jpeg\n"," β Saved 0169.jpg + 0169.txt (5 WD tags)\n","[170/270] Processing: 251.jpeg\n"," β Saved 0170.jpg 
+ 0170.txt (3 WD tags)\n","[171/270] Processing: 252.jpeg\n"," β Saved 0171.jpg + 0171.txt (10 WD tags)\n","[172/270] Processing: 253.jpeg\n"," β Saved 0172.jpg + 0172.txt (7 WD tags)\n","[173/270] Processing: 254.jpeg\n"," β Saved 0173.jpg + 0173.txt (3 WD tags)\n","[174/270] Processing: 255.jpeg\n"," β Saved 0174.jpg + 0174.txt (4 WD tags)\n","[175/270] Processing: 256.jpeg\n"," β Saved 0175.jpg + 0175.txt (6 WD tags)\n","[176/270] Processing: 257.jpeg\n"," β Saved 0176.jpg + 0176.txt (5 WD tags)\n","[177/270] Processing: 258.jpeg\n"," β Saved 0177.jpg + 0177.txt (4 WD tags)\n","[178/270] Processing: 259.jpeg\n"," β Saved 0178.jpg + 0178.txt (2 WD tags)\n","[179/270] Processing: 26.jpeg\n"," β Saved 0179.jpg + 0179.txt (5 WD tags)\n","[180/270] Processing: 260.jpeg\n"," β Saved 0180.jpg + 0180.txt (4 WD tags)\n","[181/270] Processing: 261.jpeg\n"," β Saved 0181.jpg + 0181.txt (4 WD tags)\n","[182/270] Processing: 262.jpeg\n"," β Saved 0182.jpg + 0182.txt (4 WD tags)\n","[183/270] Processing: 263.jpeg\n"," β Saved 0183.jpg + 0183.txt (11 WD tags)\n","[184/270] Processing: 264.jpeg\n"," β Saved 0184.jpg + 0184.txt (8 WD tags)\n","[185/270] Processing: 265.jpeg\n"," β Saved 0185.jpg + 0185.txt (4 WD tags)\n","[186/270] Processing: 266.jpeg\n"," β Saved 0186.jpg + 0186.txt (5 WD tags)\n","[187/270] Processing: 267.jpeg\n"," β Saved 0187.jpg + 0187.txt (8 WD tags)\n","[188/270] Processing: 268.jpeg\n"," β Saved 0188.jpg + 0188.txt (8 WD tags)\n","[189/270] Processing: 269.jpeg\n"," β Saved 0189.jpg + 0189.txt (8 WD tags)\n","[190/270] Processing: 27.jpeg\n"," β Saved 0190.jpg + 0190.txt (9 WD tags)\n","[191/270] Processing: 270.jpeg\n"," β Saved 0191.jpg + 0191.txt (8 WD tags)\n","[192/270] Processing: 28.jpeg\n"," β Saved 0192.jpg + 0192.txt (15 WD tags)\n","[193/270] Processing: 29.jpeg\n"," β Saved 0193.jpg + 0193.txt (5 WD tags)\n","[194/270] Processing: 3.jpeg\n"," β Saved 0194.jpg + 0194.txt (14 WD tags)\n","[195/270] Processing: 30.jpeg\n"," β Saved 0195.jpg + 
0195.txt (3 WD tags)\n","[196/270] Processing: 31.jpeg\n"," β Saved 0196.jpg + 0196.txt (7 WD tags)\n","[197/270] Processing: 32.jpeg\n"," β Saved 0197.jpg + 0197.txt (2 WD tags)\n","[198/270] Processing: 33.jpeg\n"," β Saved 0198.jpg + 0198.txt (3 WD tags)\n","[199/270] Processing: 34.jpeg\n"," β Saved 0199.jpg + 0199.txt (5 WD tags)\n","[200/270] Processing: 35.jpeg\n"," β Saved 0200.jpg + 0200.txt (6 WD tags)\n","[201/270] Processing: 36.jpeg\n"," β Saved 0201.jpg + 0201.txt (8 WD tags)\n","[202/270] Processing: 37.jpeg\n"," β Saved 0202.jpg + 0202.txt (10 WD tags)\n","[203/270] Processing: 38.jpeg\n"," β Saved 0203.jpg + 0203.txt (4 WD tags)\n","[204/270] Processing: 39.jpeg\n"," β Saved 0204.jpg + 0204.txt (9 WD tags)\n","[205/270] Processing: 4.jpeg\n"," β Saved 0205.jpg + 0205.txt (6 WD tags)\n","[206/270] Processing: 40.jpeg\n"," β Saved 0206.jpg + 0206.txt (1 WD tags)\n","[207/270] Processing: 41.jpeg\n"," β Saved 0207.jpg + 0207.txt (4 WD tags)\n","[208/270] Processing: 42.jpeg\n"," β Saved 0208.jpg + 0208.txt (3 WD tags)\n","[209/270] Processing: 43.jpeg\n"," β Saved 0209.jpg + 0209.txt (13 WD tags)\n","[210/270] Processing: 44.jpeg\n"," β Saved 0210.jpg + 0210.txt (5 WD tags)\n","[211/270] Processing: 45.jpeg\n"," β Saved 0211.jpg + 0211.txt (3 WD tags)\n","[212/270] Processing: 46.jpeg\n"," β Saved 0212.jpg + 0212.txt (3 WD tags)\n","[213/270] Processing: 47.jpeg\n"," β Saved 0213.jpg + 0213.txt (1 WD tags)\n","[214/270] Processing: 48.jpeg\n"," β Saved 0214.jpg + 0214.txt (5 WD tags)\n","[215/270] Processing: 49.jpeg\n"," β Saved 0215.jpg + 0215.txt (5 WD tags)\n","[216/270] Processing: 5.jpeg\n"," β Saved 0216.jpg + 0216.txt (3 WD tags)\n","[217/270] Processing: 50.jpeg\n"," β Saved 0217.jpg + 0217.txt (6 WD tags)\n","[218/270] Processing: 51.jpeg\n"," β Saved 0218.jpg + 0218.txt (4 WD tags)\n","[219/270] Processing: 52.jpeg\n"," β Saved 0219.jpg + 0219.txt (2 WD tags)\n","[220/270] Processing: 53.jpeg\n"," β Saved 0220.jpg + 0220.txt (7 WD 
tags)\n","[221/270] Processing: 54.jpeg\n"," β Saved 0221.jpg + 0221.txt (12 WD tags)\n","[222/270] Processing: 55.jpeg\n"," β Saved 0222.jpg + 0222.txt (23 WD tags)\n","[223/270] Processing: 56.jpeg\n"," β Saved 0223.jpg + 0223.txt (8 WD tags)\n","[224/270] Processing: 57.jpeg\n"," β Saved 0224.jpg + 0224.txt (3 WD tags)\n","[225/270] Processing: 58.jpeg\n"," β Saved 0225.jpg + 0225.txt (8 WD tags)\n","[226/270] Processing: 59.jpeg\n"," β Saved 0226.jpg + 0226.txt (4 WD tags)\n","[227/270] Processing: 6.jpeg\n"," β Saved 0227.jpg + 0227.txt (0 WD tags)\n","[228/270] Processing: 60.jpeg\n"," β Saved 0228.jpg + 0228.txt (37 WD tags)\n","[229/270] Processing: 61.jpeg\n"," β Saved 0229.jpg + 0229.txt (0 WD tags)\n","[230/270] Processing: 62.jpeg\n"," β Saved 0230.jpg + 0230.txt (1 WD tags)\n","[231/270] Processing: 63.jpeg\n"," β Saved 0231.jpg + 0231.txt (7 WD tags)\n","[232/270] Processing: 64.jpeg\n"," β Saved 0232.jpg + 0232.txt (5 WD tags)\n","[233/270] Processing: 65.jpeg\n"," β Saved 0233.jpg + 0233.txt (6 WD tags)\n","[234/270] Processing: 66.jpeg\n"," β Saved 0234.jpg + 0234.txt (1 WD tags)\n","[235/270] Processing: 67.jpeg\n"," β Saved 0235.jpg + 0235.txt (4 WD tags)\n","[236/270] Processing: 68.jpeg\n"," β Saved 0236.jpg + 0236.txt (7 WD tags)\n","[237/270] Processing: 69.jpeg\n"," β Saved 0237.jpg + 0237.txt (4 WD tags)\n","[238/270] Processing: 7.jpeg\n"," β Saved 0238.jpg + 0238.txt (5 WD tags)\n","[239/270] Processing: 70.jpeg\n"," β Saved 0239.jpg + 0239.txt (5 WD tags)\n","[240/270] Processing: 71.jpeg\n"," β Saved 0240.jpg + 0240.txt (14 WD tags)\n","[241/270] Processing: 72.jpeg\n"," β Saved 0241.jpg + 0241.txt (10 WD tags)\n","[242/270] Processing: 73.jpeg\n"," β Saved 0242.jpg + 0242.txt (9 WD tags)\n","[243/270] Processing: 74.jpeg\n"," β Saved 0243.jpg + 0243.txt (9 WD tags)\n","[244/270] Processing: 75.jpeg\n"," β Saved 0244.jpg + 0244.txt (6 WD tags)\n","[245/270] Processing: 76.jpeg\n"," β Saved 0245.jpg + 0245.txt (4 WD tags)\n","[246/270] 
Processing: 77.jpeg\n"," β Saved 0246.jpg + 0246.txt (6 WD tags)\n","[247/270] Processing: 78.jpeg\n"," β Saved 0247.jpg + 0247.txt (4 WD tags)\n","[248/270] Processing: 79.jpeg\n"," β Saved 0248.jpg + 0248.txt (2 WD tags)\n","[249/270] Processing: 8.jpeg\n"," β Saved 0249.jpg + 0249.txt (4 WD tags)\n","[250/270] Processing: 80.jpeg\n"," β Saved 0250.jpg + 0250.txt (12 WD tags)\n","[251/270] Processing: 81.jpeg\n"," β Saved 0251.jpg + 0251.txt (18 WD tags)\n","[252/270] Processing: 82.jpeg\n"," β Saved 0252.jpg + 0252.txt (0 WD tags)\n","[253/270] Processing: 83.jpeg\n"," β Saved 0253.jpg + 0253.txt (8 WD tags)\n","[254/270] Processing: 84.jpeg\n"," β Saved 0254.jpg + 0254.txt (5 WD tags)\n","[255/270] Processing: 85.jpeg\n"," β Saved 0255.jpg + 0255.txt (7 WD tags)\n","[256/270] Processing: 86.jpeg\n"," β Saved 0256.jpg + 0256.txt (7 WD tags)\n","[257/270] Processing: 87.jpeg\n"," β Saved 0257.jpg + 0257.txt (17 WD tags)\n","[258/270] Processing: 88.jpeg\n"," β Saved 0258.jpg + 0258.txt (9 WD tags)\n","[259/270] Processing: 89.jpeg\n"," β Saved 0259.jpg + 0259.txt (3 WD tags)\n","[260/270] Processing: 9.jpeg\n"," β Saved 0260.jpg + 0260.txt (0 WD tags)\n","[261/270] Processing: 90.jpeg\n"," β Saved 0261.jpg + 0261.txt (4 WD tags)\n","[262/270] Processing: 91.jpeg\n"," β Saved 0262.jpg + 0262.txt (10 WD tags)\n","[263/270] Processing: 92.jpeg\n"," β Saved 0263.jpg + 0263.txt (12 WD tags)\n","[264/270] Processing: 93.jpeg\n"," β Saved 0264.jpg + 0264.txt (11 WD tags)\n","[265/270] Processing: 94.jpeg\n"," β Saved 0265.jpg + 0265.txt (27 WD tags)\n","[266/270] Processing: 95.jpeg\n"," β Saved 0266.jpg + 0266.txt (3 WD tags)\n","[267/270] Processing: 96.jpeg\n"," β Saved 0267.jpg + 0267.txt (10 WD tags)\n","[268/270] Processing: 97.jpeg\n"," β Saved 0268.jpg + 0268.txt (4 WD tags)\n","[269/270] Processing: 98.jpeg\n"," β Saved 0269.jpg + 0269.txt (6 WD tags)\n","[270/270] Processing: 99.jpeg\n"," β Saved 0270.jpg + 0270.txt (6 WD tags)\n","\n","β
WD tagging complete. Tags saved in /content/wd_tags/\n"]}]},{"cell_type":"code","source":["#@markdown # Cell 5: Clear VRAM + Load DINO Tagger\n","import torch\n","import gc\n","import sys\n","from pathlib import Path\n","\n","# Add WORKING_DIR to sys.path\n","if str(WORKING_DIR) not in sys.path:\n"," sys.path.insert(0, str(WORKING_DIR))\n","\n","from inference_tagger_standalone import Tagger\n","\n","# Clear VRAM\n","print(\"π§Ή Clearing VRAM...\")\n","if 'model' in globals(): del model\n","torch.cuda.empty_cache()\n","gc.collect()\n","\n","print(\"\\nπ₯ Loading DINO Tagger...\")\n","tagger = Tagger(\n"," checkpoint_path=str(Path(WORKING_DIR) / \"tagger_proto.safetensors\"),\n"," vocab_path=str(Path(WORKING_DIR) / \"tagger_vocab_with_categories.json\"),\n"," device=\"cuda\" if torch.cuda.is_available() else \"cpu\",\n"," dtype=torch.bfloat16 if torch.cuda.is_available() else torch.float32,\n"," max_size=1024\n",")\n","\n","print(f\"β
DINO Tagger loaded on {tagger.device}\")"],"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"kM6DLBVAGY-2","executionInfo":{"status":"ok","timestamp":1775400368898,"user_tz":-120,"elapsed":121314,"user":{"displayName":"No Name","userId":"10578412414437288386"}},"outputId":"98423f70-f60b-4724-a2aa-56e16b439464"},"execution_count":null,"outputs":[{"output_type":"stream","name":"stdout","text":["π§Ή Clearing VRAM...\n","\n","π₯ Loading DINO Tagger...\n","[Tagger] Vocabulary: 74,625 tags\n","[Tagger] Loading checkpoint: /content/drive/MyDrive/DinoTaggerPipeline/tagger_proto.safetensors\n","[Tagger] Ready on cuda (torch.bfloat16)\n","β
DINO Tagger loaded on cuda\n"]}]},{"cell_type":"code","source":["#@markdown # Cell 6.a: Tag with DINO Tagger + Build Full Instruction Prompt + Original Caption (//--// separator)\n","from PIL import Image\n","from pathlib import Path\n","import json\n","\n","threshold_percent = 80 #@param {type:\"slider\", min:1, max:95, step:1}\n","max_tags = 100 #@param {type:\"slider\", min:5, max:150, step:5}\n","use_max_tags = False #@param {type:\"boolean\"}\n","\n","wd_dir = Path(\"/content/wd_tags\")\n","dino_dir = Path(\"/content/dino_combined\")\n","dino_dir.mkdir(exist_ok=True)\n","\n","image_files = sorted(list(wd_dir.glob(\"*.jpg\")))\n","threshold = threshold_percent / 100.0\n","\n","# Load original caption mapping\n","with open(\"/content/image_stems_order.json\", \"r\", encoding=\"utf-8\") as f:\n"," image_stems_order = json.load(f)\n","with open(\"/content/caption_map.json\", \"r\", encoding=\"utf-8\") as f:\n"," caption_map = json.load(f)\n","\n","# === FULL INSTRUCTION TEMPLATE (tags will replace <TAGS>) ===\n","instruction_template = \"\"\"You are a describer of images featuring female fictional characters. Within the text description you should use these words in the text : <TAGS>\n","Your description should not have newline or *bold font* or itemization. Assume the art style to be a 3D render. Assume ambiguously gendered people to be female , and clearly refer to them as such. If you see text in the image that is unclear , just say that the description is \"CHROMA\" for simplicity. Imagine an object , water , or silhouette based on the image. The object can also be furniture such as a table with a green potted plant. Describe the image if this object / person / silhouette / furniture foreground should be something blurry seen up close , which is obscuring the image from the top , bottom or right side. The main image should have a sharp camera focus in contrast to the blurry foreground. You may not use value driven wording. You may not use superlative language. 
I want output to be arranged as a single block of text approx. 400 words long.\"\"\"\n","\n","print(f\"Starting DINO tagging + full instruction prompt creation for {len(image_files)} images...\")\n","\n","for i, img_path in enumerate(image_files, start=1):\n"," base_name = img_path.stem\n"," txt_path = wd_dir / f\"{base_name}.txt\"\n","\n"," print(f\"[{i}/{len(image_files)}] Processing: {img_path.name}\")\n","\n"," if use_max_tags:\n"," tags_list = tagger.predict(str(img_path), topk=max_tags, threshold=None)\n"," else:\n"," tags_list = tagger.predict(str(img_path), topk=None, threshold=threshold)\n"," if len(tags_list) > max_tags:\n"," tags_list = tags_list[:max_tags]\n","\n"," dino_tags = [tag for tag, prob in tags_list]\n","\n"," # Read WD tags\n"," if txt_path.exists():\n"," with open(txt_path, \"r\", encoding=\"utf-8\") as f:\n"," existing_text = f.read().strip()\n"," existing_tags = [t.strip() for t in existing_text.replace(\",\", \" \").split() if t.strip()]\n"," else:\n"," existing_tags = []\n","\n"," # Combine tags (WD + DINO, deduplicated)\n"," combined = existing_tags + dino_tags\n"," seen = set()\n"," final_tags = [tag for tag in combined if not (tag in seen or seen.add(tag))]\n","\n"," tag_text = \" , \".join(final_tags) if add_commas else \" \".join(final_tags)\n","\n"," # === INSERT TAGS INTO THE FULL INSTRUCTION TEMPLATE ===\n"," final_instruction = instruction_template.replace(\"<TAGS>\", tag_text)\n","\n"," # === ADD ORIGINAL CAPTION AFTER SEPARATOR ===\n"," original_stem = image_stems_order[i-1]\n"," original_caption = caption_map.get(original_stem, \"\").strip()\n","\n"," if original_caption:\n"," final_text = f\"{final_instruction}//--//{original_caption}\"\n"," else:\n"," final_text = final_instruction\n","\n"," # Save updated pair\n"," new_img = dino_dir / f\"{base_name}.jpg\"\n"," new_txt = dino_dir / f\"{base_name}.txt\"\n","\n"," Image.open(img_path).convert(\"RGB\").save(new_img, \"JPEG\", quality=95)\n"," with open(new_txt, \"w\", 
encoding=\"utf-8\") as f:\n"," f.write(final_text)\n","\n"," print(f\" β Saved {base_name}.jpg + {base_name}.txt (FULL INSTRUCTION + tags//--//original caption)\")\n","\n","print(\"\\nβ
DINO tagging + full instruction prompt creation complete.\")"],"metadata":{"id":"vleqSnOAGdLv"},"execution_count":null,"outputs":[]},{"cell_type":"code","source":["#@markdown # Cell 6.b: Save Final Image-Text Pairs to gemma4_lora_training_set.zip\n","from pathlib import Path\n","import zipfile\n","import shutil\n","\n","print(\"πΎ Saving final image-text pairs (with full instruction prompt + tags//--//original caption) to Google Drive...\")\n","\n","dino_dir = Path(\"/content/dino_combined\")\n","if not dino_dir.exists() or not list(dino_dir.glob(\"*.jpg\")):\n"," print(\"β No /content/dino_combined folder found. Please run previous cells first.\")\n","else:\n"," final_folder = Path(\"/content/drive/MyDrive/gemma4_lora_training_set\")\n"," final_folder.mkdir(parents=True, exist_ok=True)\n","\n"," print(f\" Copying {len(list(dino_dir.glob('*.jpg')))} updated image + text pairs...\")\n"," for item in dino_dir.iterdir():\n"," if item.is_file():\n"," shutil.copy2(item, final_folder / item.name)\n","\n"," # Create the exact zip you requested\n"," final_zip_path = Path(\"/content/drive/MyDrive/gemma4_lora_training_set.zip\")\n"," with zipfile.ZipFile(final_zip_path, \"w\") as zipf:\n"," for file in final_folder.iterdir():\n"," zipf.write(file, file.name)\n","\n"," print(f\"\\nπ ALL DONE!\")\n"," print(f\" β Final folder: {final_folder}\")\n"," print(f\" β ZIP saved exactly as: {final_zip_path}\")\n"," print(\"\\nYou can now safely disconnect the runtime.\")"],"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"RvPjcmXjH2mg","executionInfo":{"status":"ok","timestamp":1775401384466,"user_tz":-120,"elapsed":10536,"user":{"displayName":"No Name","userId":"10578412414437288386"}},"outputId":"bcf19c4b-7af2-4cc3-e5bd-bf1a537f85af"},"execution_count":null,"outputs":[{"output_type":"stream","name":"stdout","text":["πΎ Saving final image-text pairs (with full instruction prompt + tags//--//original caption) to Google Drive...\n"," Copying 270 updated image + text 
pairs...\n","\n","π ALL DONE!\n"," β Final folder: /content/drive/MyDrive/gemma4_lora_training_set\n"," β ZIP saved exactly as: /content/drive/MyDrive/gemma4_lora_training_set.zip\n","\n","You can now safely disconnect the runtime.\n"]}]},{"cell_type":"code","execution_count":null,"metadata":{"id":"FQF71-mvmlc1"},"outputs":[],"source":["# ================================================\n","# Auto Disconnect Colab Session\n","# ================================================\n","\n","print(\"π Disconnecting Colab session in 3 seconds...\")\n","import time\n","time.sleep(3)\n","\n","from google.colab import runtime\n","runtime.unassign()\n","\n","print(\"Session disconnected.\")"]},{"cell_type":"markdown","source":["Disconnect"],"metadata":{"id":"BWmBDLN_K-Na"}},{"cell_type":"code","source":["#@markdown # Cell 1: Connect Google Drive + Restore HF_TOKEN (Run this FIRST)\n","from google.colab import drive\n","from google.colab import userdata\n","import os\n","from pathlib import Path\n","\n","print(\"π Connecting to Google Drive and setting up HF_TOKEN...\")\n","\n","# Mount Drive\n","drive.mount('/content/drive', force_remount=True)\n","\n","# Create working folder (everything will be saved here)\n","WORKING_DIR = \"/content/drive/MyDrive/Gemma4Captioner\"\n","os.makedirs(WORKING_DIR, exist_ok=True)\n","print(f\"β
Working directory ready: {WORKING_DIR}\")\n","\n","# Restore HF_TOKEN from Colab secrets\n","hf_token = userdata.get('HF_TOKEN')\n","if hf_token:\n"," os.environ[\"HF_TOKEN\"] = hf_token\n"," print(\"β
HF_TOKEN restored from Colab Secrets.\")\n","else:\n"," print(\"β οΈ HF_TOKEN not found. Please click the key icon on the left sidebar, add 'HF_TOKEN', and run this cell again.\")\n","\n","print(\"\\nβ
Cell 1 complete! Run Cell 2 next.\")"],"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"I4QXbDNCi0fA","executionInfo":{"status":"ok","timestamp":1775408210858,"user_tz":-120,"elapsed":22319,"user":{"displayName":"fukU Google","userId":"02763165356193834046"}},"outputId":"6efe2b9b-83fa-49e2-ec7f-60f36dee9fab"},"execution_count":1,"outputs":[{"output_type":"stream","name":"stdout","text":["π Connecting to Google Drive and setting up HF_TOKEN...\n","Mounted at /content/drive\n","β
Working directory ready: /content/drive/MyDrive/Gemma4Captioner\n","β
HF_TOKEN restored from Colab Secrets.\n","\n","β
Cell 1 complete! Run Cell 2 next.\n"]}]},{"cell_type":"code","source":["#@markdown # Cell 2: Fix PyTorch + torchvision + Transformers (Run this AFTER Cell 1)\n","\n","import torch\n","print(\"π Current PyTorch version :\", torch.__version__)\n","print(\"π PyTorch CUDA version :\", torch.version.cuda)\n","print(\"π CUDA available :\", torch.cuda.is_available())\n","\n","# === Fix only torchvision + torchaudio to match your exact torch version ===\n","print(\"\\nπ Fixing torchvision + torchaudio to match torch 2.11.0+cu128...\")\n","\n","# Uninstall only the mismatched packages\n","!pip uninstall -y torchvision torchaudio --quiet\n","\n","# Auto-detect matching versions (torch 2.11.0 β torchvision 0.26.0)\n","torch_base = torch.__version__.split('+')[0] # e.g. \"2.11.0\"\n","torch_minor = int(torch_base.split('.')[1]) # 11\n","tv_minor = torch_minor + 15 # 26\n","tv_version = f\"0.{tv_minor}.0+cu128\"\n","ta_version = f\"{torch_base}+cu128\"\n","\n","print(f\" β Installing torchvision=={tv_version}\")\n","print(f\" β Installing torchaudio=={ta_version}\")\n","\n","!pip install --force-reinstall --no-deps \\\n"," torchvision=={tv_version} torchaudio=={ta_version} \\\n"," --index-url https://download.pytorch.org/whl/cu128 --quiet\n","\n","# === Install latest Transformers + required packages ===\n","print(\"\\nπ Installing latest Transformers (needed for gemma4) + accelerate + bitsandbytes...\")\n","!pip install -q --upgrade --force-reinstall \"pillow<12.0\" transformers accelerate bitsandbytes\n","\n","print(\"\\nβ
All dependencies installed!\")\n","\n","# === Debug printouts ===\n","import transformers\n","import torchvision\n","print(\"\\nπ FINAL VERSION DEBUG:\")\n","print(\"torch.__version__ =\", torch.__version__)\n","print(\"torchvision.__version__ =\", torchvision.__version__)\n","print(\"transformers.__version__ =\", transformers.__version__)\n","print(\"CUDA available =\", torch.cuda.is_available())\n","\n","print(\"\\nβ
Cell 2 complete! Run Cell 3 next.\")\n","print(\" (If you see any import error below, do Runtime β Restart session, then re-run Cell 2 + Cell 3)\")"],"metadata":{"colab":{"base_uri":"https://localhost:8080/","height":1000},"id":"5NvY4DFEmat7","executionInfo":{"status":"error","timestamp":1775408788152,"user_tz":-120,"elapsed":99991,"user":{"displayName":"fukU Google","userId":"02763165356193834046"}},"outputId":"2c846a97-346e-447b-fed8-70d1d9bbcafa"},"execution_count":3,"outputs":[{"output_type":"stream","name":"stdout","text":["π Current PyTorch version : 2.10.0+cu128\n","π PyTorch CUDA version : 12.8\n","π CUDA available : True\n","\n","π Fixing torchvision + torchaudio to match torch 2.11.0+cu128...\n"," β Installing torchvision==0.25.0+cu128\n"," β Installing torchaudio==2.10.0+cu128\n","\n","π Installing latest Transformers (needed for gemma4) + accelerate + bitsandbytes...\n","\u001b[31mERROR: pip's dependency resolver does not currently take into account all the packages that are installed. 
This behaviour is the source of the following dependency conflicts.\n","ipython 7.34.0 requires jedi>=0.16, which is not installed.\n","torchvision 0.25.0+cu128 requires torch==2.10.0, but you have torch 2.11.0 which is incompatible.\n","torchaudio 2.10.0+cu128 requires torch==2.10.0, but you have torch 2.11.0 which is incompatible.\n","libcuvs-cu12 26.2.0 requires cuda-toolkit[cublas,curand,cusolver,cusparse]==12.*, but you have cuda-toolkit 13.0.2 which is incompatible.\n","libraft-cu12 26.2.0 requires cuda-toolkit[cublas,curand,cusolver,cusparse]==12.*, but you have cuda-toolkit 13.0.2 which is incompatible.\n","libcuml-cu12 26.2.0 requires cuda-toolkit[cublas,cufft,curand,cusolver,cusparse]==12.*, but you have cuda-toolkit 13.0.2 which is incompatible.\n","cuml-cu12 26.2.0 requires cuda-toolkit[cublas,cufft,curand,cusolver,cusparse]==12.*, but you have cuda-toolkit 13.0.2 which is incompatible.\n","datasets 4.0.0 requires fsspec[http]<=2025.3.0,>=2023.1.0, but you have fsspec 2026.3.0 which is incompatible.\n","numba 0.60.0 requires numpy<2.1,>=1.22, but you have numpy 2.4.4 which is incompatible.\n","cuda-python 12.9.4 requires cuda-bindings~=12.9.4, but you have cuda-bindings 13.2.0 which is incompatible.\n","cudf-cu12 26.2.1 requires cuda-toolkit[nvcc,nvrtc]==12.*, but you have cuda-toolkit 13.0.2 which is incompatible.\n","bigframes 2.38.0 requires rich<14,>=12.4.4, but you have rich 14.3.3 which is incompatible.\n","tensorflow 2.19.0 requires numpy<2.2.0,>=1.26.0, but you have numpy 2.4.4 which is incompatible.\n","gcsfs 2025.3.0 requires fsspec==2025.3.0, but you have fsspec 2026.3.0 which is incompatible.\u001b[0m\u001b[31m\n","\u001b[0m\n","β
All dependencies installed!\n"]},{"output_type":"error","ename":"ImportError","evalue":"cannot import name 'is_opaque_value' from 'torch._library.opaque_object' (/usr/local/lib/python3.12/dist-packages/torch/_library/opaque_object.py)","traceback":["\u001b[0;31m---------------------------------------------------------------------------\u001b[0m","\u001b[0;31mImportError\u001b[0m Traceback (most recent call last)","\u001b[0;32m/tmp/ipykernel_4388/3154538250.py\u001b[0m in \u001b[0;36m<cell line: 0>\u001b[0;34m()\u001b[0m\n\u001b[1;32m 32\u001b[0m \u001b[0;31m# === Debug printouts ===\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 33\u001b[0m \u001b[0;32mimport\u001b[0m \u001b[0mtransformers\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 34\u001b[0;31m \u001b[0;32mimport\u001b[0m \u001b[0mtorchvision\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 35\u001b[0m \u001b[0mprint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m\"\\nπ FINAL VERSION DEBUG:\"\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 36\u001b[0m \u001b[0mprint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m\"torch.__version__ =\"\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mtorch\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m__version__\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n","\u001b[0;32m/usr/local/lib/python3.12/dist-packages/torchvision/__init__.py\u001b[0m in \u001b[0;36m<module>\u001b[0;34m\u001b[0m\n\u001b[1;32m 8\u001b[0m \u001b[0;31m# .extensions) before entering _meta_registrations.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 9\u001b[0m \u001b[0;32mfrom\u001b[0m \u001b[0;34m.\u001b[0m\u001b[0mextension\u001b[0m \u001b[0;32mimport\u001b[0m \u001b[0m_HAS_OPS\u001b[0m \u001b[0;31m# usort:skip\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 10\u001b[0;31m 
\u001b[0;32mfrom\u001b[0m \u001b[0mtorchvision\u001b[0m \u001b[0;32mimport\u001b[0m \u001b[0m_meta_registrations\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mdatasets\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mio\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mmodels\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mops\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mtransforms\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mutils\u001b[0m \u001b[0;31m# usort:skip\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 11\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 12\u001b[0m \u001b[0;32mtry\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n","\u001b[0;32m/usr/local/lib/python3.12/dist-packages/torchvision/models/__init__.py\u001b[0m in \u001b[0;36m<module>\u001b[0;34m\u001b[0m\n\u001b[1;32m 1\u001b[0m \u001b[0;32mfrom\u001b[0m \u001b[0;34m.\u001b[0m\u001b[0malexnet\u001b[0m \u001b[0;32mimport\u001b[0m \u001b[0;34m*\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 2\u001b[0;31m \u001b[0;32mfrom\u001b[0m \u001b[0;34m.\u001b[0m\u001b[0mconvnext\u001b[0m \u001b[0;32mimport\u001b[0m \u001b[0;34m*\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 3\u001b[0m \u001b[0;32mfrom\u001b[0m \u001b[0;34m.\u001b[0m\u001b[0mdensenet\u001b[0m \u001b[0;32mimport\u001b[0m \u001b[0;34m*\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 4\u001b[0m \u001b[0;32mfrom\u001b[0m \u001b[0;34m.\u001b[0m\u001b[0mefficientnet\u001b[0m \u001b[0;32mimport\u001b[0m \u001b[0;34m*\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 5\u001b[0m \u001b[0;32mfrom\u001b[0m \u001b[0;34m.\u001b[0m\u001b[0mgooglenet\u001b[0m \u001b[0;32mimport\u001b[0m \u001b[0;34m*\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n","\u001b[0;32m/usr/local/lib/python3.12/dist-packages/torchvision/models/convnext.py\u001b[0m in 
\u001b[0;36m<module>\u001b[0;34m\u001b[0m\n\u001b[1;32m 8\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 9\u001b[0m \u001b[0;32mfrom\u001b[0m \u001b[0;34m.\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mops\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mmisc\u001b[0m \u001b[0;32mimport\u001b[0m \u001b[0mConv2dNormActivation\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mPermute\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 10\u001b[0;31m \u001b[0;32mfrom\u001b[0m \u001b[0;34m.\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mops\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mstochastic_depth\u001b[0m \u001b[0;32mimport\u001b[0m \u001b[0mStochasticDepth\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 11\u001b[0m \u001b[0;32mfrom\u001b[0m \u001b[0;34m.\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mtransforms\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_presets\u001b[0m \u001b[0;32mimport\u001b[0m \u001b[0mImageClassification\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 12\u001b[0m \u001b[0;32mfrom\u001b[0m \u001b[0;34m.\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mutils\u001b[0m \u001b[0;32mimport\u001b[0m \u001b[0m_log_api_usage_once\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n","\u001b[0;32m/usr/local/lib/python3.12/dist-packages/torchvision/ops/__init__.py\u001b[0m in \u001b[0;36m<module>\u001b[0;34m\u001b[0m\n\u001b[1;32m 21\u001b[0m \u001b[0;32mfrom\u001b[0m \u001b[0;34m.\u001b[0m\u001b[0mgiou_loss\u001b[0m \u001b[0;32mimport\u001b[0m \u001b[0mgeneralized_box_iou_loss\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 22\u001b[0m \u001b[0;32mfrom\u001b[0m \u001b[0;34m.\u001b[0m\u001b[0mmisc\u001b[0m \u001b[0;32mimport\u001b[0m \u001b[0mConv2dNormActivation\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mConv3dNormActivation\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mFrozenBatchNorm2d\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mMLP\u001b[0m\u001b[0;34m,\u001b[0m 
\u001b[0mPermute\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mSqueezeExcitation\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 23\u001b[0;31m \u001b[0;32mfrom\u001b[0m \u001b[0;34m.\u001b[0m\u001b[0mpoolers\u001b[0m \u001b[0;32mimport\u001b[0m \u001b[0mMultiScaleRoIAlign\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 24\u001b[0m \u001b[0;32mfrom\u001b[0m \u001b[0;34m.\u001b[0m\u001b[0mps_roi_align\u001b[0m \u001b[0;32mimport\u001b[0m \u001b[0mps_roi_align\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mPSRoIAlign\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 25\u001b[0m \u001b[0;32mfrom\u001b[0m \u001b[0;34m.\u001b[0m\u001b[0mps_roi_pool\u001b[0m \u001b[0;32mimport\u001b[0m \u001b[0mps_roi_pool\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mPSRoIPool\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n","\u001b[0;32m/usr/local/lib/python3.12/dist-packages/torchvision/ops/poolers.py\u001b[0m in \u001b[0;36m<module>\u001b[0;34m\u001b[0m\n\u001b[1;32m 8\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 9\u001b[0m \u001b[0;32mfrom\u001b[0m \u001b[0;34m.\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mutils\u001b[0m \u001b[0;32mimport\u001b[0m \u001b[0m_log_api_usage_once\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 10\u001b[0;31m \u001b[0;32mfrom\u001b[0m \u001b[0;34m.\u001b[0m\u001b[0mroi_align\u001b[0m \u001b[0;32mimport\u001b[0m \u001b[0mroi_align\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 11\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 12\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n","\u001b[0;32m/usr/local/lib/python3.12/dist-packages/torchvision/ops/roi_align.py\u001b[0m in \u001b[0;36m<module>\u001b[0;34m\u001b[0m\n\u001b[1;32m 5\u001b[0m \u001b[0;32mimport\u001b[0m \u001b[0mtorch\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mfx\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 6\u001b[0m 
\u001b[0;32mfrom\u001b[0m \u001b[0mtorch\u001b[0m \u001b[0;32mimport\u001b[0m \u001b[0mnn\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mTensor\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 7\u001b[0;31m \u001b[0;32mfrom\u001b[0m \u001b[0mtorch\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_dynamo\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mutils\u001b[0m \u001b[0;32mimport\u001b[0m \u001b[0mis_compile_supported\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 8\u001b[0m \u001b[0;32mfrom\u001b[0m \u001b[0mtorch\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mjit\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mannotations\u001b[0m \u001b[0;32mimport\u001b[0m \u001b[0mBroadcastingList2\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 9\u001b[0m \u001b[0;32mfrom\u001b[0m \u001b[0mtorch\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mnn\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mmodules\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mutils\u001b[0m \u001b[0;32mimport\u001b[0m \u001b[0m_pair\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n","\u001b[0;32m/usr/local/lib/python3.12/dist-packages/torch/_dynamo/__init__.py\u001b[0m in \u001b[0;36m<module>\u001b[0;34m\u001b[0m\n\u001b[1;32m 11\u001b[0m \u001b[0;32mimport\u001b[0m \u001b[0mtorch\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 12\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 13\u001b[0;31m from . 
import (\n\u001b[0m\u001b[1;32m 14\u001b[0m \u001b[0maot_compile\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 15\u001b[0m \u001b[0mconfig\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n","\u001b[0;32m/usr/local/lib/python3.12/dist-packages/torch/_dynamo/aot_compile.py\u001b[0m in \u001b[0;36m<module>\u001b[0;34m\u001b[0m\n\u001b[1;32m 13\u001b[0m \u001b[0;32mimport\u001b[0m \u001b[0mtorch\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 14\u001b[0m \u001b[0;32mimport\u001b[0m \u001b[0mtorch\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mfx\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 15\u001b[0;31m \u001b[0;32mfrom\u001b[0m \u001b[0mtorch\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_dynamo\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mconvert_frame\u001b[0m \u001b[0;32mimport\u001b[0m \u001b[0mGraphRuntimeEnv\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 16\u001b[0m \u001b[0;32mfrom\u001b[0m \u001b[0mtorch\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_dynamo\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mgraph_utils\u001b[0m \u001b[0;32mimport\u001b[0m \u001b[0m_graph_device_type\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 17\u001b[0m \u001b[0;32mfrom\u001b[0m \u001b[0mtorch\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_dynamo\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mpackage\u001b[0m \u001b[0;32mimport\u001b[0m \u001b[0mSystemInfo\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n","\u001b[0;32m/usr/local/lib/python3.12/dist-packages/torch/_dynamo/convert_frame.py\u001b[0m in \u001b[0;36m<module>\u001b[0;34m\u001b[0m\n\u001b[1;32m 60\u001b[0m \u001b[0;32mfrom\u001b[0m \u001b[0mtorch\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_dynamo\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mcallback\u001b[0m \u001b[0;32mimport\u001b[0m 
\u001b[0mCallbackTrigger\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 61\u001b[0m \u001b[0;32mfrom\u001b[0m \u001b[0mtorch\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_dynamo\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mdistributed\u001b[0m \u001b[0;32mimport\u001b[0m \u001b[0mget_compile_pg\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 62\u001b[0;31m \u001b[0;32mfrom\u001b[0m \u001b[0mtorch\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_dynamo\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0msymbolic_convert\u001b[0m \u001b[0;32mimport\u001b[0m \u001b[0mTensorifyState\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 63\u001b[0m \u001b[0;32mfrom\u001b[0m \u001b[0mtorch\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_guards\u001b[0m \u001b[0;32mimport\u001b[0m \u001b[0mcompile_context\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mCompileContext\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mCompileId\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mtracing\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 64\u001b[0m \u001b[0;32mfrom\u001b[0m \u001b[0mtorch\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_logging\u001b[0m \u001b[0;32mimport\u001b[0m \u001b[0mstructured\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n","\u001b[0;32m/usr/local/lib/python3.12/dist-packages/torch/_dynamo/symbolic_convert.py\u001b[0m in \u001b[0;36m<module>\u001b[0;34m\u001b[0m\n\u001b[1;32m 52\u001b[0m \u001b[0;32mimport\u001b[0m \u001b[0mtorch\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_logging\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 53\u001b[0m \u001b[0;32mfrom\u001b[0m \u001b[0mtorch\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_dynamo\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mdynamo_profiler\u001b[0m \u001b[0;32mimport\u001b[0m \u001b[0mDynamoProfilerState\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mFunctionTraceTiming\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 
54\u001b[0;31m \u001b[0;32mfrom\u001b[0m \u001b[0mtorch\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_dynamo\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mexc\u001b[0m \u001b[0;32mimport\u001b[0m \u001b[0mObservedException\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mTensorifyScalarRestartAnalysis\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 55\u001b[0m \u001b[0;32mfrom\u001b[0m \u001b[0mtorch\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_guards\u001b[0m \u001b[0;32mimport\u001b[0m \u001b[0mInlinedCodeCache\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mtracing\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mTracingContext\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 56\u001b[0m \u001b[0;32mfrom\u001b[0m \u001b[0mtorch\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_logging\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mstructured\u001b[0m \u001b[0;32mimport\u001b[0m \u001b[0mdump_file\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n","\u001b[0;32m/usr/local/lib/python3.12/dist-packages/torch/_dynamo/exc.py\u001b[0m in \u001b[0;36m<module>\u001b[0;34m\u001b[0m\n\u001b[1;32m 42\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 43\u001b[0m \u001b[0;32mfrom\u001b[0m \u001b[0;34m.\u001b[0m \u001b[0;32mimport\u001b[0m \u001b[0mconfig\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 44\u001b[0;31m \u001b[0;32mfrom\u001b[0m \u001b[0;34m.\u001b[0m\u001b[0mutils\u001b[0m \u001b[0;32mimport\u001b[0m \u001b[0mcounters\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 45\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 46\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n","\u001b[0;32m/usr/local/lib/python3.12/dist-packages/torch/_dynamo/utils.py\u001b[0m in \u001b[0;36m<module>\u001b[0;34m\u001b[0m\n\u001b[1;32m 68\u001b[0m \u001b[0;32mimport\u001b[0m \u001b[0mtorch\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 69\u001b[0m \u001b[0;32mimport\u001b[0m 
\u001b[0mtorch\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_functorch\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mconfig\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 70\u001b[0;31m \u001b[0;32mimport\u001b[0m \u001b[0mtorch\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mfx\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mexperimental\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0msymbolic_shapes\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 71\u001b[0m \u001b[0;32mimport\u001b[0m \u001b[0mtorch\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mutils\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_pytree\u001b[0m \u001b[0;32mas\u001b[0m \u001b[0mpytree\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 72\u001b[0m \u001b[0;32mfrom\u001b[0m \u001b[0mtorch\u001b[0m \u001b[0;32mimport\u001b[0m \u001b[0mfx\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n","\u001b[0;32m/usr/local/lib/python3.12/dist-packages/torch/fx/experimental/symbolic_shapes.py\u001b[0m in \u001b[0;36m<module>\u001b[0;34m\u001b[0m\n\u001b[1;32m 62\u001b[0m \u001b[0;32mfrom\u001b[0m \u001b[0mtorch\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_guards\u001b[0m \u001b[0;32mimport\u001b[0m \u001b[0mShapeGuard\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mSLoc\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mSource\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mTracingContext\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 63\u001b[0m \u001b[0;32mfrom\u001b[0m \u001b[0mtorch\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_library\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mfake_class_registry\u001b[0m \u001b[0;32mimport\u001b[0m \u001b[0mFakeScriptObject\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 64\u001b[0;31m \u001b[0;32mfrom\u001b[0m \u001b[0mtorch\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_library\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mopaque_object\u001b[0m \u001b[0;32mimport\u001b[0m 
\u001b[0mis_opaque_value\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 65\u001b[0m \u001b[0;32mfrom\u001b[0m \u001b[0mtorch\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_logging\u001b[0m \u001b[0;32mimport\u001b[0m \u001b[0mdtrace_structured\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mLazyString\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mstructured\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mtrace_structured\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 66\u001b[0m \u001b[0;32mfrom\u001b[0m \u001b[0mtorch\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_subclasses\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mmeta_utils\u001b[0m \u001b[0;32mimport\u001b[0m \u001b[0mis_sparse_any\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n","\u001b[0;31mImportError\u001b[0m: cannot import name 'is_opaque_value' from 'torch._library.opaque_object' (/usr/local/lib/python3.12/dist-packages/torch/_library/opaque_object.py)","","\u001b[0;31m---------------------------------------------------------------------------\u001b[0;32m\nNOTE: If your import is failing due to a missing package, you can\nmanually install dependencies using either !pip or !apt.\n\nTo view examples of installing some common dependencies, click the\n\"Open Examples\" button below.\n\u001b[0;31m---------------------------------------------------------------------------\u001b[0m\n"],"errorDetails":{"actions":[{"action":"open_url","actionText":"Open Examples","url":"/notebooks/snippets/importing_libraries.ipynb"}]}}]},{"cell_type":"code","source":["#@markdown # Cell 3: Load Gemma-4 Model (Heretic by default + Vision Latent Helpers)\n","\n","import torch\n","import os\n","from transformers import AutoProcessor, AutoModelForMultimodalLM\n","from google.colab import userdata\n","from PIL import Image\n","import base64\n","import io\n","\n","# ====================== VISION LATENT IMPROVEMENT HELPERS ======================\n","def get_optimal_vision_size(processor):\n"," 
\"\"\"Automatically detects the best resolution for Gemma-4's vision encoder.\"\"\"\n"," target_max_dim = 1024\n"," if hasattr(processor, \"image_processor\") and hasattr(processor.image_processor, \"size\"):\n"," size_cfg = processor.image_processor.size\n"," if isinstance(size_cfg, dict):\n"," dims = [v for v in size_cfg.values() if isinstance(v, (int, float))]\n"," if dims:\n"," target_max_dim = int(max(dims))\n"," elif isinstance(size_cfg, (int, float)):\n"," target_max_dim = int(size_cfg)\n"," return target_max_dim\n","\n","def image_to_data_url(img: Image.Image) -> str:\n"," \"\"\"Lossless PNG data URL β cleanest latent representation.\"\"\"\n"," buffered = io.BytesIO()\n"," img.save(buffered, format=\"PNG\")\n"," img_str = base64.b64encode(buffered.getvalue()).decode(\"utf-8\")\n"," return f\"data:image/png;base64,{img_str}\"\n","\n","#@markdown **Choose your model version:**\n","use_heretic_version = True #@param {type:\"boolean\"}\n","\n","hf_token = os.environ.get(\"HF_TOKEN\") or userdata.get('HF_TOKEN')\n","\n","if use_heretic_version:\n"," model_id = \"coder3101/gemma-4-E2B-it-heretic\"\n"," print(\"π½ Loading **Gemma-4-E2B-Heretic** (your preferred uncensored version)...\")\n"," processor = AutoProcessor.from_pretrained(model_id, token=hf_token)\n"," model = AutoModelForMultimodalLM.from_pretrained(\n"," model_id,\n"," token=hf_token,\n"," torch_dtype=\"auto\", # fixed for consistency\n"," device_map=\"auto\",\n"," low_cpu_mem_usage=True,\n"," )\n","else:\n"," model_id = \"google/gemma-4-E2B-it\"\n"," print(\"π½ Loading **Official Google Gemma-4-E2B-it**...\")\n"," processor = AutoProcessor.from_pretrained(model_id, token=hf_token)\n"," model = AutoModelForMultimodalLM.from_pretrained(\n"," model_id,\n"," token=hf_token,\n"," torch_dtype=\"auto\",\n"," device_map=\"auto\",\n"," low_cpu_mem_usage=True,\n"," )\n","\n","print(\"β
Model loaded successfully!\")\n","print(f\" Model: {model_id}\")\n","print(f\" Device: {model.device}\")\n","print(f\" VRAM used: {torch.cuda.memory_allocated() / 1024**3:.2f} GB\")\n","print(\" β
Vision latent helpers are now active\")\n","print(\"\\nβ
Cell 3 complete!\")"],"metadata":{"id":"sZI_cFZhms97"},"execution_count":null,"outputs":[]}]}
|