# Hugging Face Space: convert BLOOM-like models for bloomz.cpp
# (The lines "Spaces:" / "Runtime error" were a status banner captured from the
# Space's web page, not part of the program.)
from pathlib import Path | |
import gradio as gr | |
from huggingface_hub import HfApi, Repository | |
from huggingface_hub.utils import RepositoryNotFoundError | |
from convert import convert | |
# Local checkout of the bloomz.cpp conversion scripts, cloned at import time.
REPO_PATH = Path("bloomz.cpp")
# Reuse REPO_PATH rather than repeating the "bloomz.cpp" literal.
# NOTE(review): `Repository` is deprecated in recent huggingface_hub releases —
# confirm the pinned version, or migrate to a plain `git clone` subprocess.
repo = Repository(local_dir=str(REPO_PATH), clone_from="https://github.com/NouamaneTazi/bloomz.cpp")
def run(token: str, model_id: str, precision: str, quantization: bool) -> str:
    """Validate inputs, check the model is public, then convert it for bloomz.cpp.

    Parameters
    ----------
    token:
        A Hugging Face access token (read access is enough).
    model_id:
        Hub repo id to convert (e.g. "bigscience/bloomz-7b1").
    precision:
        "FP16" or "FP32"; forwarded verbatim to `convert`.
    quantization:
        Whether to apply 4-bit quantization; forwarded verbatim to `convert`.

    Returns
    -------
    A Markdown string describing success or failure (rendered by the Gradio UI).
    """
    # Guard clause: both fields are required before touching the Hub.
    if token == "" or model_id == "":
        return """
### Invalid input π
Please fill a token and model_id.
"""
    api = HfApi(token=token)
    try:
        # TODO: make a PR to bloomz.cpp to be able to pass a token
        api.model_info(repo_id=model_id, token=False)  # only public repos are accessible
    except RepositoryNotFoundError:
        return f"""
### Error π’π’π’
Repository {model_id} not found. Only public models are convertible at the moment.
"""
    try:
        model_path = convert(model_id=model_id, precision=precision, quantization=quantization)
        # `convert` returns the path of the converted weights; log it for debugging.
        # (Fixed: a dead second `return` used to follow here, referencing an
        # undefined `commit_info` variable, and the print label said "[commit_info]".)
        print("[model_path]", model_path)
        return f"""
### Success π₯
"""
    except Exception as e:
        # Best-effort error reporting back to the UI: surface the exception text
        # rather than crashing the Space.
        return f"""
### Error π’π’π’
{e}
"""
# Markdown help text rendered above the form by gr.Interface(description=...).
# NOTE(review): this text promises "the URL of the opened PR", but `run` currently
# returns a success message with no URL — confirm and reconcile the copy.
DESCRIPTION = """
The steps are the following:
- Paste a read-access token from hf.co/settings/tokens. Read access is enough given that we will open a PR against the source repo.
- Input a model id from the Hub
- Click "Submit"
- That's it! You'll get feedback if it works or not, and if it worked, you'll get the URL of the opened PR π₯
β οΈ For now only `pytorch_model.bin` files are supported but we'll extend in the future.
"""
# Build the form widgets first, then wire them into a single gr.Interface.
token_box = gr.Text(max_lines=1, label="your hf_token")
model_box = gr.Text(max_lines=1, label="model_id (e.g.: bigscience/bloomz-7b1)")
precision_radio = gr.Radio(choices=["FP16", "FP32"], label="Precision", value="FP16")
quantization_box = gr.Checkbox(value=False, label="4-bits quantization")
output_md = gr.Markdown(label="output")

demo = gr.Interface(
    fn=run,
    inputs=[token_box, model_box, precision_radio, quantization_box],
    outputs=[output_md],
    title="Convert any BLOOM-like model to be compatible with bloomz.cpp",
    description=DESCRIPTION,
    article="Check out the [bloomz.cpp](https://github.com/NouamaneTazi/bloomz.cpp) repo on GitHub",
    allow_flagging="never",
).queue()  # queue requests so long-running conversions don't block each other
demo.launch()