Update app.py

app.py CHANGED
@@ -27,19 +27,19 @@ text_category_example = [[" anadolu_efes e 18 lik star ! beko_basketbol_ligi nde
 
 
 @spaces.GPU
-def nli(input, model_choice="turna_nli_nli_tr"):
+def nli(input, model_choice="turna_nli_nli_tr", generation_params):
 
     if model_choice=="turna_nli_nli_tr":
-        nli_model = pipeline(model="boun-tabi-LMG/turna_nli_nli_tr", device=0)
+        nli_model = pipeline(model="boun-tabi-LMG/turna_nli_nli_tr", device=0, **generation_params)
         return nli_model(input)[0]["generated_text"]
     else:
-        stsb_model = pipeline(model="boun-tabi-LMG/turna_semantic_similarity_stsb_tr", device=0)
+        stsb_model = pipeline(model="boun-tabi-LMG/turna_semantic_similarity_stsb_tr", device=0, **generation_params)
 
         return stsb_model(input)[0]["generated_text"]
 
 
 @spaces.GPU
-def sentiment_analysis(input, model_choice="turna_classification_17bintweet_sentiment"):
+def sentiment_analysis(input, model_choice="turna_classification_17bintweet_sentiment", generation_params):
     if model_choice=="turna_classification_17bintweet_sentiment":
         sentiment_model = pipeline(model="boun-tabi-LMG/turna_classification_17bintweet_sentiment", device=0)
 
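Note on this hunk: the new generation_params parameter has no default, so in Python it cannot follow the defaulted model_choice argument. A minimal sketch of one way the same idea stays valid; the =None default, the empty-dict fallback, and the call-time spread are assumptions, not the committed code:

from transformers import pipeline

def nli(input, model_choice="turna_nli_nli_tr", generation_params=None):
    # A parameter without a default cannot follow "model_choice=...", so default
    # to None and fall back to an empty dict.
    generation_params = generation_params or {}
    # device=0 assumes the GPU the Space gets via @spaces.GPU.
    if model_choice == "turna_nli_nli_tr":
        model = pipeline(model="boun-tabi-LMG/turna_nli_nli_tr", device=0)
    else:
        model = pipeline(model="boun-tabi-LMG/turna_semantic_similarity_stsb_tr", device=0)
    # Generation options can also be supplied when the pipeline is called.
    return model(input, **generation_params)[0]["generated_text"]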
@@ -96,15 +96,16 @@ def categorize(input):
     return ttc(input)[0]["generated_text"]
 
 @spaces.GPU
-def turna(input):
+def turna(input, generation_params):
     turna = pipeline(model="boun-tabi-LMG/TURNA", device=0)
 
-    return turna(input)[0]["generated_text"]
+    return turna(input, **generation_params)[0]["generated_text"]
 
 
 with gr.Blocks(theme="abidlabs/Lime") as demo:
     gr.Markdown("# TURNA 🐦")
     gr.Markdown(DESCRIPTION)
+
 
     with gr.Tab("Sentiment Analysis"):
         gr.Markdown("TURNA fine-tuned on sentiment analysis. Enter text to analyse sentiment and pick the model (tweets or product reviews).")
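For reference, once turna() spreads generation_params into the pipeline call, the dict needs plain values keyed by the parameter names transformers' generate() understands (for example "temperature" rather than the "temp" key used further down in this diff). Illustrative values only, not taken from the commit:

example_generation_params = {
    "max_new_tokens": 128,
    "length_penalty": 1.0,
    "top_k": 10,
    "top_p": 0.95,
    "temperature": 1.0,        # generate() expects "temperature", not "temp"
    "repetition_penalty": 3.0,
    "num_beams": 3,
    "do_sample": True,
}
# e.g. turna("Bir varmış, bir yokmuş ...", example_generation_params)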
@@ -123,14 +124,31 @@ with gr.Blocks(theme="abidlabs/Lime") as demo:
         gr.Markdown("Pre-trained TURNA. Enter text to start generating.")
         with gr.Column():
             with gr.Row():
-
-
+                max_new_tokens = gr.Slider(label = "Maximum length",
+                                           minimum = 0,
+                                           maximum = 512,
+                                           value = 128)
+                length_penalty = gr.Slider(label = "Length penalty",
+                                           value=1.0)
+                top_k = gr.Slider("Top-k", value=10)
+                top_p = gr.Slider("Top-p", value=0.95)
+                temp = gr.Slider(label = "Temperature", value=1.0, minimum=0.1, maximum=100.0)
+                repetition_penalty = gr.Slider("Repetition Penalty", minimum=0.0, value=3.0, step=0.1)
+                num_beams = gr.Slider(label = "Number of beams", minimum=1,
+                                      maximum=10, value=3)
+                do_sample = gr.Radio(choices = [True, False], value = True, label = "Sampling")
+                generation_params = {"max_new_tokens":max_new_tokens, length_penalty:"length_penalty",
+                                     "top_k": top_k, "top_p": top_p, "temp": temp, "num_beams": num_beams,
+                                     "do_sample": do_sample}
+            with gr.Row():
 
-
+                text_gen_input = gr.Textbox(label="Text Generation Input")
+
+                text_gen_submit = gr.Button()
             text_gen_output = gr.Textbox(label="Text Generation Output")
-            text_gen_submit.click(turna, inputs=[text_gen_input], outputs=text_gen_output)
+            text_gen_submit.click(turna, inputs=[text_gen_input, generation_params], outputs=text_gen_output)
             text_gen_example = [["Bir varmış, bir yokmuş, evvel zaman içinde, kalbur saman içinde, uzak diyarların birinde bir turna"]]
-            text_gen_examples = gr.Examples(examples = text_gen_example, inputs = [text_gen_input], outputs=text_gen_output, fn=turna)
+            text_gen_examples = gr.Examples(examples = text_gen_example, inputs = [text_gen_input, generation_params], outputs=text_gen_output, fn=turna)
 
     with gr.Tab("Text Categorization"):
         gr.Markdown("TURNA fine-tuned on text categorization. Enter text to categorize text or try the example.")
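Two things in this hunk are worth flagging. Gradio's .click() and gr.Examples expect inputs= to be components or a list of components, not a dict that maps strings to components, and the event handler receives each component's current value as a separate argument; in addition, the dict literal swaps one pair (length_penalty:"length_penalty"), uses the key "temp", and never includes repetition_penalty even though that slider is created. One possible wiring that avoids those pitfalls, sketched as an assumption rather than the committed code; the handler name generate, the Checkbox in place of the Radio, and the int casts are mine:

import gradio as gr
from transformers import pipeline

def generate(text, max_new_tokens, length_penalty, top_k, top_p,
             temperature, repetition_penalty, num_beams, do_sample):
    # In the Space this would sit behind @spaces.GPU; device=0 assumes a GPU.
    turna_model = pipeline(model="boun-tabi-LMG/TURNA", device=0)
    # Build the kwargs from the component values handed in by Gradio.
    generation_params = {
        "max_new_tokens": int(max_new_tokens),
        "length_penalty": length_penalty,
        "top_k": int(top_k),
        "top_p": top_p,
        "temperature": temperature,
        "repetition_penalty": repetition_penalty,
        "num_beams": int(num_beams),
        "do_sample": do_sample,
    }
    return turna_model(text, **generation_params)[0]["generated_text"]

with gr.Blocks() as demo:
    with gr.Row():
        max_new_tokens = gr.Slider(label="Maximum length", minimum=0, maximum=512, value=128)
        length_penalty = gr.Slider(label="Length penalty", value=1.0)
        top_k = gr.Slider(label="Top-k", value=10)
        top_p = gr.Slider(label="Top-p", value=0.95)
        temperature = gr.Slider(label="Temperature", minimum=0.1, maximum=100.0, value=1.0)
        repetition_penalty = gr.Slider(label="Repetition Penalty", minimum=0.0, value=3.0, step=0.1)
        num_beams = gr.Slider(label="Number of beams", minimum=1, maximum=10, value=3)
        do_sample = gr.Checkbox(label="Sampling", value=True)
    with gr.Row():
        text_gen_input = gr.Textbox(label="Text Generation Input")
        text_gen_submit = gr.Button()
    text_gen_output = gr.Textbox(label="Text Generation Output")
    # Each control is passed individually; their values arrive as the
    # corresponding positional arguments of generate().
    text_gen_submit.click(
        generate,
        inputs=[text_gen_input, max_new_tokens, length_penalty, top_k, top_p,
                temperature, repetition_penalty, num_beams, do_sample],
        outputs=text_gen_output,
    )

if __name__ == "__main__":
    demo.launch()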