aisltnab committed
Commit fb04b75
1 Parent(s): cee67d6

Create app_old.py

Files changed (1): app_old.py (+98, -0)
app_old.py ADDED
@@ -0,0 +1,98 @@
+ import csv
+ from datetime import datetime
+ import os
+ from typing import Optional
+ import gradio as gr
+
+ from convert import convert
+ from huggingface_hub import HfApi, Repository
+
+
+ DATASET_REPO_URL = "https://huggingface.co/datasets/safetensors/conversions"
+ DATA_FILENAME = "data.csv"
+ DATA_FILE = os.path.join("data", DATA_FILENAME)
+
+ HF_TOKEN = os.environ.get("HF_TOKEN")
+
+ repo: Optional[Repository] = None
+ # TODO
+ if False and HF_TOKEN:
+     repo = Repository(local_dir="data", clone_from=DATASET_REPO_URL, token=HF_TOKEN)
+
+
+ def run(model_id: str, is_private: bool, token: Optional[str] = None) -> str:
+     if model_id == "":
+         return """
+ ### Invalid input 🐞
+
+ Please fill a token and model_id.
+ """
+     try:
+         if is_private:
+             api = HfApi(token=token)
+         else:
+             api = HfApi(token=HF_TOKEN)
+         hf_is_private = api.model_info(repo_id=model_id).private
+         if is_private and not hf_is_private:
+             # This model is NOT private
+             # Change the token so we make the PR on behalf of the bot.
+             api = HfApi(token=HF_TOKEN)
+
+         print("is_private", is_private)
+
+         commit_info, errors = convert(api=api, model_id=model_id)
+         print("[commit_info]", commit_info)
+
+
+         string = f"""
+ ### Success 🔥
+ Yay! This model was successfully converted and a PR was open using your token, here:
+ [{commit_info.pr_url}]({commit_info.pr_url})
+ """
+         if errors:
+             string += "\nErrors during conversion:\n"
+             string += "\n".join(f"Error while converting {filename}: {e}, skipped conversion" for filename, e in errors)
+         return string
+     except Exception as e:
+         return f"""
+ ### Error 😢😢😢
+
+ {e}
+ """
+
+
+ DESCRIPTION = """
+ The steps are the following:
+ - Paste a read-access token from hf.co/settings/tokens. Read access is enough given that we will open a PR against the source repo.
+ - Input a model id from the Hub
+ - Click "Submit"
+ - That's it! You'll get feedback if it works or not, and if it worked, you'll get the URL of the opened PR 🔥
+ ⚠️ For now only `pytorch_model.bin` files are supported but we'll extend in the future.
+ """
+
+ title = "Convert any model to Safetensors and open a PR"
+ allow_flagging = "never"
+
+ def token_text(visible=False):
+     return gr.Text(max_lines=1, label="your_hf_token", visible=visible)
+
+ with gr.Blocks(title=title) as demo:
+     description = gr.Markdown(f"""# {title}""")
+     description = gr.Markdown(DESCRIPTION)
+
+     with gr.Row() as r:
+         with gr.Column() as c:
+             model_id = gr.Text(max_lines=1, label="model_id")
+             is_private = gr.Checkbox(label="Private model")
+             token = token_text()
+             with gr.Row() as c:
+                 clean = gr.ClearButton()
+                 submit = gr.Button("Submit", variant="primary")
+
+         with gr.Column() as d:
+             output = gr.Markdown()
+
+     is_private.change(lambda s: token_text(s), inputs=is_private, outputs=token)
+     submit.click(run, inputs=[model_id, is_private, token], outputs=output, concurrency_limit=1)
+
+ demo.queue(max_size=10).launch(show_api=True)