joaogante (HF staff) committed
Commit 6c981de
1 Parent(s): 632990b
Files changed (1)
  1. app.py +59 -4
app.py CHANGED
@@ -1,7 +1,62 @@
  import gradio as gr

- def greet(name):
-     return "Hello " + name + "!!"

- iface = gr.Interface(fn=greet, inputs="text", outputs="text")
- iface.launch()
  import gradio as gr

+ def create_medusa_heads(model_id: str):
+     return "", ""

+ def run(model_id: str) -> str:
+     if model_id == "":
+         return """
+         ### Invalid input 🐞
+
+         Please fill in a model_id.
+         """
+     try:
+         commit_info, errors = create_medusa_heads(model_id=model_id)
+         print("[commit_info]", commit_info)
+
+         string = f"""
+         ### Success 🔥
+
+         Yay! This model was successfully converted and a PR was opened using your token, here:
+
+         [{commit_info.pr_url}]({commit_info.pr_url})
+         """
+         if errors:
+             string += "\nErrors during conversion:\n"
+             string += "\n".join(f"Error while converting {filename}: {e}, skipped conversion" for filename, e in errors)
+         return string
+     except Exception as e:
+         return f"""
+         ### Error 😢😢😢
+
+         {e}
+         """
+
+
+ DESCRIPTION = """
+ The steps to create [medusa](https://sites.google.com/view/medusa-llm) heads are the following:
+
+ - Input a public model id from the Hub
+ - Click "Submit"
+ - That's it! You'll get feedback if it works or not, and if it worked, you'll get the URL of the new repo 🔥
+ """
+
+ title = "Create LLM medusa heads in a new repo 🐍"
+
+ with gr.Blocks(title=title) as demo:
+     description = gr.Markdown(f"""# {title}""")
+     description = gr.Markdown(DESCRIPTION)
+
+     with gr.Row() as r:
+         with gr.Column() as c:
+             model_id = gr.Text(max_lines=1, label="model_id")
+             with gr.Row() as c:
+                 clean = gr.ClearButton()
+                 submit = gr.Button("Submit", variant="primary")
+
+         with gr.Column() as d:
+             output = gr.Markdown()
+
+     submit.click(run, inputs=[model_id], outputs=output, concurrency_limit=1)
+
+ demo.queue(max_size=10).launch(show_api=True)
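
Because the new app launches with show_api=True, the run endpoint can also be exercised from Python once the Space is live. The sketch below is not part of the commit: the Space id ("joaogante/medusa-heads") and the "/run" endpoint name are assumptions, and gradio_client must be installed separately.

```python
# Hypothetical usage sketch (not part of this commit): calling the Space's API
# with gradio_client. The Space id and endpoint name below are assumptions.
from gradio_client import Client

client = Client("joaogante/medusa-heads")         # hypothetical Space id
client.view_api()                                 # prints the exposed endpoints and their signatures
result = client.predict("gpt2", api_name="/run")  # send a model_id, get the Markdown status back
print(result)
```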