Upload pipeline.py with huggingface_hub
Browse files — pipeline.py (+11, −15)
pipeline.py
CHANGED
|
@@ -42,7 +42,6 @@ Supported HF deployment surfaces
|
|
| 42 |
|
| 43 |
from __future__ import annotations
|
| 44 |
|
| 45 |
-
import importlib
|
| 46 |
import sys
|
| 47 |
from pathlib import Path
|
| 48 |
from typing import Union
|
|
@@ -231,14 +230,12 @@ class AdmeshIntentPipeline(_HFPipeline):
|
|
| 231 |
stacklevel=2,
|
| 232 |
)
|
| 233 |
else:
|
| 234 |
-
|
| 235 |
-
get_multitask_runtime
|
| 236 |
-
|
| 237 |
-
|
| 238 |
-
|
| 239 |
-
|
| 240 |
-
get_multitask_runtime = importlib.import_module("multitask_runtime").get_multitask_runtime
|
| 241 |
-
get_head = importlib.import_module("model_runtime").get_head
|
| 242 |
|
| 243 |
rt = get_multitask_runtime()
|
| 244 |
if rt._model is not None:
|
|
@@ -303,12 +300,11 @@ class AdmeshIntentPipeline(_HFPipeline):
|
|
| 303 |
|
| 304 |
def _ensure_loaded(self) -> None:
|
| 305 |
if self._classify_fn is None:
|
| 306 |
-
|
| 307 |
-
|
| 308 |
-
|
| 309 |
-
|
| 310 |
-
|
| 311 |
-
self._classify_fn = importlib.import_module("combined_inference").classify_query
|
| 312 |
|
| 313 |
def __repr__(self) -> str:
|
| 314 |
state = "loaded" if self._classify_fn is not None else "not yet loaded"
|
|
|
|
| 42 |
|
| 43 |
from __future__ import annotations
|
| 44 |
|
|
|
|
| 45 |
import sys
|
| 46 |
from pathlib import Path
|
| 47 |
from typing import Union
|
|
|
|
| 230 |
stacklevel=2,
|
| 231 |
)
|
| 232 |
else:
|
| 233 |
+
try:
|
| 234 |
+
from .multitask_runtime import get_multitask_runtime # type: ignore
|
| 235 |
+
from .model_runtime import get_head # type: ignore
|
| 236 |
+
except ImportError:
|
| 237 |
+
from multitask_runtime import get_multitask_runtime
|
| 238 |
+
from model_runtime import get_head
|
|
|
|
|
|
|
| 239 |
|
| 240 |
rt = get_multitask_runtime()
|
| 241 |
if rt._model is not None:
|
|
|
|
| 300 |
|
| 301 |
def _ensure_loaded(self) -> None:
|
| 302 |
if self._classify_fn is None:
|
| 303 |
+
try:
|
| 304 |
+
from .combined_inference import classify_query # type: ignore
|
| 305 |
+
except ImportError:
|
| 306 |
+
from combined_inference import classify_query
|
| 307 |
+
self._classify_fn = classify_query
|
|
|
|
| 308 |
|
| 309 |
def __repr__(self) -> str:
|
| 310 |
state = "loaded" if self._classify_fn is not None else "not yet loaded"
|