Tonic committed
Commit d2756e5 · unverified · 1 Parent(s): e1ae310
Files changed (2)
  1. app.py +81 -0
  2. requirements.txt +5 -0
app.py ADDED
@@ -0,0 +1,81 @@
+import gradio as gr
+from transformers import pipeline, set_seed
+import torch
+
+description = "The models are intended for both research and commercial use in any of the languages included in the training data. The base models are intended either for language generation or to be further fine-tuned for specific use-cases. The instruction-tuned variants can be used as general-purpose assistants, as long as the user is fully aware of the model’s limitations."
+
+joinus = """
+## Join us:
+🌟TeamTonic🌟 is always making cool demos! Join our active builders' 🛠️community 👻 [![Join us on Discord](https://img.shields.io/discord/1109943800132010065?label=Discord&logo=discord&style=flat-square)](https://discord.gg/qdfnvSPcqP) On 🤗Huggingface: [MultiTransformer](https://huggingface.co/MultiTransformer) On 🌐Github: [Tonic-AI](https://github.com/tonic-ai) & contribute to 🌟[Build Tonic](https://git.tonic-ai.com/contribute) 🤗Big thanks to Yuvi Sharma and all the folks at huggingface for the community grant 🤗
+"""
+
+device = "cuda" if torch.cuda.is_available() else "cpu"
+model_id = "BSC-LT/salamandra-2b"
+generator = pipeline("text-generation", model_id, device_map="auto")
+
+def generate_text(prompt, temperature, top_p, max_new_tokens, repetition_penalty):
+    # set_seed(42)
+    generation_args = {
+        "temperature": temperature,
+        "top_p": top_p,
+        "max_new_tokens": max_new_tokens,
+        "repetition_penalty": repetition_penalty,
+        "do_sample": True
+    }
+    output = generator(prompt, **generation_args)
+    return output[0]["generated_text"]
+
+def update_output(prompt, temperature, top_p, max_new_tokens, repetition_penalty):
+    generated_text = generate_text(prompt, temperature, top_p, max_new_tokens, repetition_penalty)
+    return generated_text
+
+def update_prompt(example):
+    return example
+
+with gr.Blocks() as demo:
+    gr.Markdown("# 🙋🏻‍♂️Welcome to Tonic's 📲🦎Salamandra-2b On-Device Demo")
+    with gr.Row():
+        with gr.Column(scale=1):
+            with gr.Group():
+                gr.Markdown(description)
+        with gr.Column(scale=1):
+            with gr.Group():
+                gr.Markdown(joinus)
+
+    with gr.Row():
+        with gr.Column(scale=1):
+            prompt = gr.Textbox(lines=5, label="🙋🏻‍♂️Input Prompt")
+            generate_button = gr.Button("Try 📲🦎Salamandra-2b")
+            with gr.Accordion("🧪Parameters", open=False):
+                # with gr.Column(scale=1):
+                temperature = gr.Slider(0.0, 1.0, value=0.7, label="🌡️Temperature")
+                top_p = gr.Slider(0.0, 1.0, value=0.95, label="⚛️Top P")
+                max_new_tokens = gr.Slider(1, 4096, value=350, step=1, label="🤑Max New Tokens")
+                repetition_penalty = gr.Slider(1.0, 2.0, value=1.2, label="🦜Repetition Penalty")
+
+        with gr.Column(scale=1):
+            output = gr.Textbox(lines=10, label="📲🦎Salamandra-2b")
+
+    generate_button.click(
+        update_output,
+        inputs=[prompt, temperature, top_p, max_new_tokens, repetition_penalty],
+        outputs=output
+    )
+
+    examples = gr.Examples(
+        examples=[
+            ["Todo el mundo sabe que vivir en Barcelona es"],
+            ["¿Pueblo o ciudad? Una ventaja de vivir en la ciudad es que hay muchas oportunidades de ocio y empleo, así como una gran diversidad de comercios para todos los gustos. Sin embargo, las ciudades suelen ser "],
+            ["Llegir ens proporciona"],
+            ["What I find more fascinating about languages is that"],
+            ["La vie peut être"],
+            ["The future of AI is"]
+        ],
+        inputs=prompt,
+        outputs=prompt,
+        fn=update_prompt,
+        label="Example Prompts"
+    )
+
+if __name__ == "__main__":
+    demo.launch()
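As a quick sanity check, the generation path can be exercised without the Gradio UI. A minimal sketch, assuming the BSC-LT/salamandra-2b weights can be downloaded (the prompt and the 50-token budget are illustrative choices, not part of the commit):

```python
# Standalone check of the same pipeline call app.py makes.
# device_map="auto" places the model on GPU if one is available, else CPU.
from transformers import pipeline

generator = pipeline("text-generation", model="BSC-LT/salamandra-2b", device_map="auto")
result = generator(
    "The future of AI is",   # illustrative prompt, taken from the demo's examples
    temperature=0.7,         # the demo's default sampling settings
    top_p=0.95,
    max_new_tokens=50,       # smaller budget than the demo's 350, for a fast check
    repetition_penalty=1.2,
    do_sample=True,
)
print(result[0]["generated_text"])
```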
requirements.txt ADDED
@@ -0,0 +1,5 @@
+transformers
+torch
+accelerate
+sentencepiece
+protobuf
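To run the Space locally, install the dependencies above plus `gradio` (the Hugging Face Spaces runtime provides it automatically for Gradio SDK Spaces, which is presumably why it is not listed here), then start the app with `python app.py`; `demo.launch()` serves the UI at http://localhost:7860 by default.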