Spaces:
Running
Running
changes in ultimate sota
Browse files- ultimate_sota_training.py +22 -13
ultimate_sota_training.py
CHANGED
|
@@ -98,21 +98,30 @@ import torch
|
|
| 98 |
from datasets import Dataset
|
| 99 |
|
| 100 |
# --- CRITICAL FIXES FOR HF JOBS ---
|
| 101 |
-
# 1. Mock vllm: TRL's GRPOTrainer (v0.18+) has a buggy import path that hard-fails if vllm is missing
|
| 102 |
-
#
|
| 103 |
import sys
|
|
|
|
|
|
|
| 104 |
from unittest.mock import MagicMock
|
| 105 |
-
|
| 106 |
-
|
| 107 |
-
|
| 108 |
-
|
| 109 |
-
|
| 110 |
-
|
| 111 |
-
|
| 112 |
-
|
| 113 |
-
|
| 114 |
-
|
| 115 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 116 |
import transformers.utils.hub
|
| 117 |
if not hasattr(transformers.utils.hub, "TRANSFORMERS_CACHE"):
|
| 118 |
transformers.utils.hub.TRANSFORMERS_CACHE = "/tmp"
|
|
|
|
| 98 |
from datasets import Dataset
|
| 99 |
|
| 100 |
# --- CRITICAL FIXES FOR HF JOBS ---
|
| 101 |
+
# 1. Mock vllm: TRL's GRPOTrainer (v0.18+) has a buggy import path that hard-fails if vllm is missing.
|
| 102 |
+
# We must provide a mock that satisfies both 'import' and 'importlib.util.find_spec'.
|
| 103 |
import sys
|
| 104 |
+
import types
|
| 105 |
+
import importlib.machinery
|
| 106 |
from unittest.mock import MagicMock
|
| 107 |
+
|
| 108 |
+
def mock_vllm_hierarchy():
    """Register MagicMock stand-ins for the vllm package tree in sys.modules.

    TRL's GRPOTrainer (v0.18+) import path hard-fails when vllm is missing.
    Each stub carries a real ModuleSpec so both a plain ``import`` and
    ``importlib.util.find_spec`` succeed against it.
    """
    vllm_module_names = (
        "vllm",
        "vllm.distributed",
        "vllm.distributed.device_communicators",
        "vllm.distributed.device_communicators.pynccl",
        "vllm.model_executor",
        "vllm.model_executor.parallel_utils",
    )
    for dotted_name in vllm_module_names:
        stub = MagicMock(spec=types.ModuleType)
        stub.__name__ = dotted_name
        # A loader-less spec is all find_spec() needs for an already-imported
        # (i.e. sys.modules-resident) module.
        stub.__spec__ = importlib.machinery.ModuleSpec(dotted_name, None)
        sys.modules[dotted_name] = stub


mock_vllm_hierarchy()
|
| 123 |
+
|
| 124 |
+
# 2. Compatibility shim: transformers 4.40+ removed TRANSFORMERS_CACHE from
#    transformers.utils.hub; restore it so older consumers can still read it
#    (the original note mentions llm_blender — presumably it imports this
#    constant; verify against that dependency).
#    NOTE(review): despite the original "Mock llm_blender" wording, nothing is
#    mocked here — only the missing attribute is patched back in.
import transformers.utils.hub
if not hasattr(transformers.utils.hub, "TRANSFORMERS_CACHE"):
    transformers.utils.hub.TRANSFORMERS_CACHE = "/tmp"