Spaces:
				
			
			
	
			
			
					
		Running
		
			on 
			
			CPU Upgrade
	
	
	
			
			
	
	
	
	
		
		
					
		Running
		
			on 
			
			CPU Upgrade
	| import csv | |
| import datetime | |
| import os | |
| from typing import Optional | |
| import gradio as gr | |
| from onnx_export import convert | |
| from huggingface_hub import HfApi, Repository | |
# TODO: save stats about the Space?
# NOTE(review): the block below is deliberately disabled — it is a bare
# module-level string literal, which Python evaluates and discards (a no-op).
# It set up a private dataset repo for logging conversions; re-enabling it
# requires removing the surrounding triple quotes and an HF_TOKEN secret.
"""
DATASET_REPO_URL = "https://huggingface.co/datasets/safetensors/conversions"
DATA_FILENAME = "data.csv"
DATA_FILE = os.path.join("data", DATA_FILENAME)
HF_TOKEN = os.environ.get("HF_TOKEN")
repo: Optional[Repository] = None
if HF_TOKEN:
    repo = Repository(local_dir="data", clone_from=DATASET_REPO_URL, token=HF_TOKEN)
"""
def onnx_export(token: str, model_id: str, task: str) -> str:
    """Convert a Hub model to ONNX and open a PR on its repo.

    Args:
        token: Hugging Face access token (read access is sufficient).
        model_id: Repo id of the model on the Hub to convert.
        task: Task name for the exporter, or "auto" to let it infer one.

    Returns:
        A Markdown string describing success (with the PR URL) or failure.
    """
    if token == "" or model_id == "":
        # Bail out early with a user-facing message instead of raising.
        return """
### Invalid input π
Please fill a token and model_id.
"""
    try:
        api = HfApi(token=token)
        # convert() signals success with error == "0"; anything else is a
        # user-displayable error string.
        error, commit_info = convert(api=api, model_id=model_id, task=task)
        if error != "0":
            return error
        print("[commit_info]", commit_info)
        # Fixed: the Markdown link was missing its opening "[" bracket,
        # which rendered as a broken link in the output panel.
        return f"### Success π₯ Yay! This model was successfully converted and a PR was open using your token, here: [{commit_info.pr_url}]({commit_info.pr_url})"
    except Exception as e:
        # Surface any failure (bad token, missing repo, export error) to the UI
        # rather than crashing the Gradio handler.
        return f"### Error: {e}"
| """ | |
| # save in a private dataset: | |
| if repo is not None: | |
| repo.git_pull(rebase=True) | |
| with open(DATA_FILE, "a") as csvfile: | |
| writer = csv.DictWriter( | |
| csvfile, fieldnames=["model_id", "pr_url", "time"] | |
| ) | |
| writer.writerow( | |
| { | |
| "model_id": model_id, | |
| "pr_url": commit_info.pr_url, | |
| "time": str(datetime.now()), | |
| } | |
| ) | |
| commit_url = repo.push_to_hub() | |
| print("[dataset]", commit_url) | |
| return f"### Success π₯ Yay! This model was successfully converted and a PR was open using your token, here: {commit_info.pr_url}]({commit_info.pr_url})" | |
| """ | |
| DESCRIPTION = """ | |
| The steps are the following: | |
| - Paste a read-access token from hf.co/settings/tokens. Read access is enough given that we will open a PR against the source repo. | |
| - Input a model id from the Hub | |
| - If necessary, input the task for this model. | |
| - Click "Convert to ONNX" | |
| - That's it! You'll get feedback if it works or not, and if it worked, you'll get the URL of the opened PR! | |
| """ | |
# Gradio UI wiring: three single-line text inputs (token, model_id, task)
# feed onnx_export, whose Markdown result is rendered in the output panel.
demo = gr.Interface(
    title="POC to convert any PyTorch model to ONNX",
    description=DESCRIPTION,
    allow_flagging="never",
    article="Check out the [Optimum repo on GitHub](https://github.com/huggingface/optimum)",
    inputs=[
        gr.Text(max_lines=1, label="Hugging Face token"),
        gr.Text(max_lines=1, label="model_id"),
        gr.Text(value="auto", max_lines=1, label="task (can be left blank, will auto-infer)"),
    ],
    outputs=[gr.Markdown(label="output")],
    fn=onnx_export,
)

# Guard the launch so importing this module (e.g. from tests or tooling) does
# not start a web server; behavior is unchanged when run as `python app.py`,
# which is how a Space executes it.
if __name__ == "__main__":
    demo.launch()
