Narsil (HF staff) committed on
Commit cee67d6
1 Parent(s): 0d3afce

Moving to gradio 4.4.1


- Token only needed for private repos.
- Correctly handling private repos, but making the bot the operator for all public + gated models (see the sketch below).
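
For context, a minimal sketch of the operator-selection rule this commit puts into `run()` (the helper name `pick_operator_api` is hypothetical; in `app.py` the logic is inline, and `HF_TOKEN` is the Space's bot token):

```python
from typing import Optional

from huggingface_hub import HfApi


def pick_operator_api(model_id: str, is_private: bool, user_token: Optional[str], bot_token: str) -> HfApi:
    """Use the user's token only for genuinely private repos;
    public and gated models get the bot as the PR operator."""
    api = HfApi(token=user_token) if is_private else HfApi(token=bot_token)
    if is_private and not api.model_info(repo_id=model_id).private:
        # The repo is actually public: fall back to the bot token
        # so the PR is opened on behalf of the bot.
        api = HfApi(token=bot_token)
    return api
```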

Files changed (2)
  1. README.md +1 -1
  2. app.py +38 -36
README.md CHANGED
@@ -4,7 +4,7 @@ emoji: 🐶
 colorFrom: yellow
 colorTo: red
 sdk: gradio
-sdk_version: 3.36.1
+sdk_version: 4.4.1
 app_file: app.py
 pinned: true
 license: apache-2.0
app.py CHANGED
@@ -20,39 +20,29 @@ if False and HF_TOKEN:
     repo = Repository(local_dir="data", clone_from=DATASET_REPO_URL, token=HF_TOKEN)


-def run(token: str, model_id: str) -> str:
-    if token == "" or model_id == "":
+def run(model_id: str, is_private: bool, token: Optional[str] = None) -> str:
+    if model_id == "":
         return """
         ### Invalid input 🐞

         Please fill a token and model_id.
         """
     try:
-        api = HfApi(token=token)
-        is_private = api.model_info(repo_id=model_id).private
+        if is_private:
+            api = HfApi(token=token)
+        else:
+            api = HfApi(token=HF_TOKEN)
+        hf_is_private = api.model_info(repo_id=model_id).private
+        if is_private and not hf_is_private:
+            # This model is NOT private
+            # Change the token so we make the PR on behalf of the bot.
+            api = HfApi(token=HF_TOKEN)
+
         print("is_private", is_private)

         commit_info, errors = convert(api=api, model_id=model_id)
         print("[commit_info]", commit_info)

-        # save in a (public) dataset:
-        # TODO False because of LFS bug.
-        if False and repo is not None and not is_private:
-            repo.git_pull(rebase=True)
-            print("pulled")
-            with open(DATA_FILE, "a") as csvfile:
-                writer = csv.DictWriter(
-                    csvfile, fieldnames=["model_id", "pr_url", "time"]
-                )
-                writer.writerow(
-                    {
-                        "model_id": model_id,
-                        "pr_url": commit_info.pr_url,
-                        "time": str(datetime.now()),
-                    }
-                )
-            commit_url = repo.push_to_hub()
-            print("[dataset]", commit_url)

         string = f"""
         ### Success 🔥
@@ -84,17 +74,29 @@ The steps are the following:
 ⚠️ For now only `pytorch_model.bin` files are supported but we'll extend in the future.
 """

-demo = gr.Interface(
-    title="Convert any model to Safetensors and open a PR",
-    description=DESCRIPTION,
-    allow_flagging="never",
-    article="Check out the [Safetensors repo on GitHub](https://github.com/huggingface/safetensors)",
-    inputs=[
-        gr.Text(max_lines=1, label="your_hf_token"),
-        gr.Text(max_lines=1, label="model_id"),
-    ],
-    outputs=[gr.Markdown(label="output")],
-    fn=run,
-).queue(max_size=10, concurrency_count=1)
-
-demo.launch(show_api=True)
+title="Convert any model to Safetensors and open a PR"
+allow_flagging="never"
+
+def token_text(visible=False):
+    return gr.Text(max_lines=1, label="your_hf_token", visible=visible)
+
+with gr.Blocks(title=title) as demo:
+    description = gr.Markdown(f"""# {title}""")
+    description = gr.Markdown(DESCRIPTION)
+
+    with gr.Row() as r:
+        with gr.Column() as c:
+            model_id = gr.Text(max_lines=1, label="model_id")
+            is_private = gr.Checkbox(label="Private model")
+            token = token_text()
+            with gr.Row() as c:
+                clean = gr.ClearButton()
+                submit = gr.Button("Submit", variant="primary")
+
+        with gr.Column() as d:
+            output = gr.Markdown()
+
+    is_private.change(lambda s: token_text(s), inputs=is_private, outputs=token)
+    submit.click(run, inputs=[model_id, is_private, token], outputs=output, concurrency_limit=1)
+
+demo.queue(max_size=10).launch(show_api=True)
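
For reference (not part of the commit), a standalone sketch of the Gradio 4 patterns the new Blocks UI relies on: an event handler may return a freshly constructed component to update the output's properties, which is how the token field is shown only for private models, and the per-event `concurrency_limit` argument replaces the old `queue(concurrency_count=...)`. The `greet` handler below is a placeholder standing in for `run`:

```python
import gradio as gr


def token_text(visible: bool = False) -> gr.Text:
    # Returning a new gr.Text from an event handler updates the existing
    # component in place (here: its visibility).
    return gr.Text(max_lines=1, label="your_hf_token", visible=visible)


def greet(model_id: str) -> str:  # placeholder handler, stands in for run()
    return f"Would convert `{model_id}`"


with gr.Blocks() as demo:
    model_id = gr.Text(max_lines=1, label="model_id")
    is_private = gr.Checkbox(label="Private model")
    token = token_text()
    submit = gr.Button("Submit", variant="primary")
    output = gr.Markdown()

    # Show the token box only when "Private model" is checked.
    is_private.change(token_text, inputs=is_private, outputs=token)
    # In Gradio 4, concurrency is set per event listener, not on queue().
    submit.click(greet, inputs=model_id, outputs=output, concurrency_limit=1)

demo.queue(max_size=10).launch()
```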