set pipeline tag in model card
app.py CHANGED

@@ -2,7 +2,7 @@ import os
 import shutil
 import torch
 import gradio as gr
-from huggingface_hub import HfApi, whoami, ModelCard
+from huggingface_hub import HfApi, whoami, ModelCard, model_info
 from gradio_huggingfacehub_search import HuggingfaceHubSearch
 from textwrap import dedent
 from pathlib import Path
@@ -57,7 +57,7 @@ def export(model_id: str, private_repo: bool, overwritte: bool, oauth_token: gr.
 
     if library_name == "diffusers":
         auto_model_class = "OVDiffusionPipeline"
-
+    elif library_name == "transformers":
         task = TasksManager.infer_task_from_model(model_id, token=oauth_token.token)
 
         if task == "text2text-generation":
@@ -67,7 +67,9 @@ def export(model_id: str, private_repo: bool, overwritte: bool, oauth_token: gr.
             return f"The task '{task}' is not supported, only {_HEAD_TO_AUTOMODELS.keys()} tasks are supported"
 
         auto_model_class = _HEAD_TO_AUTOMODELS[task]
-
+    else:
+        # TODO: add sentence-transformers and timm support in space
+        return f"Library {library_name} not yet supported"
 
     ov_files = _find_files_matching_pattern(
         model_id,
@@ -129,6 +131,11 @@ def export(model_id: str, private_repo: bool, overwritte: bool, oauth_token: gr.
     card.data.tags.append("openvino")
     card.data.tags.append("openvino-export")
     card.data.base_model = model_id
+
+    pipeline_tag = getattr(model_info(model_id, token=oauth_token.token), "pipeline_tag", None)
+    if pipeline_tag is not None:
+        card.data.pipeline_tag = pipeline_tag
+
     card.text = dedent(
         f"""
         This model was converted to OpenVINO from [`{model_id}`](https://huggingface.co/{model_id}) using [optimum-intel](https://github.com/huggingface/optimum-intel)
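Taken together, the second and third hunks turn the export path into an explicit per-library dispatch: diffusers models map straight to `OVDiffusionPipeline`, transformers models go through task inference, and anything else is rejected for now. The sketch below condenses that logic into a standalone helper; `resolve_auto_model_class` is a hypothetical name, reading `library_name` from Hub metadata via `model_info` and the two `_HEAD_TO_AUTOMODELS` entries are assumptions for illustration, not code from the space.

```python
# Hedged sketch of the dispatch introduced by the hunks above, not the
# space's actual code. `resolve_auto_model_class` is a hypothetical helper;
# reading `library_name` from Hub metadata and the two mapping entries
# below are assumptions made for illustration.
from huggingface_hub import model_info
from optimum.exporters.tasks import TasksManager

# Illustrative subset; app.py defines the full task -> OV auto class mapping.
_HEAD_TO_AUTOMODELS = {
    "text-generation": "OVModelForCausalLM",
    "text-classification": "OVModelForSequenceClassification",
}

def resolve_auto_model_class(model_id: str, token: str | None = None) -> str:
    # Assumption: the library is read from the Hub; the space may derive it differently.
    library_name = model_info(model_id, token=token).library_name

    if library_name == "diffusers":
        return "OVDiffusionPipeline"
    elif library_name == "transformers":
        # Same call as in the diff: infer the task, then look up the OV auto class.
        task = TasksManager.infer_task_from_model(model_id, token=token)
        if task not in _HEAD_TO_AUTOMODELS:
            raise ValueError(
                f"The task '{task}' is not supported, only {list(_HEAD_TO_AUTOMODELS)} tasks are supported"
            )
        return _HEAD_TO_AUTOMODELS[task]
    else:
        # TODO in the space: sentence-transformers and timm support
        raise ValueError(f"Library {library_name} not yet supported")
```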
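The last hunk is the change the commit title refers to: the source model's `pipeline_tag` is read from the Hub and, when present, copied into the generated model card so the exported repository keeps the original task tag. Below is a minimal standalone sketch of that pattern using only the public `huggingface_hub` API; the repo id is a placeholder and `ModelCard.load` stands in for however the space actually builds its card.

```python
# Minimal sketch of the pipeline-tag propagation added in the last hunk,
# using only the public huggingface_hub API. The repo id is a placeholder
# and ModelCard.load() stands in for however the space builds its card.
from huggingface_hub import ModelCard, model_info

source_id = "distilbert/distilbert-base-uncased-finetuned-sst-2-english"  # placeholder

card = ModelCard.load(source_id)
card.data.tags = (card.data.tags or []) + ["openvino", "openvino-export"]
card.data.base_model = source_id

# model_info() may not expose a pipeline_tag for every repo, hence the
# defensive getattr() + None check, exactly as in the diff.
pipeline_tag = getattr(model_info(source_id), "pipeline_tag", None)
if pipeline_tag is not None:
    card.data.pipeline_tag = pipeline_tag  # exported repo keeps the original task

print(card.data.to_yaml())
```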