Mr-Vicky-01 committed
Commit e87650b · verified · 1 Parent(s): 579a648

Update app.py

Files changed (1)
  1. app.py +9 -8
app.py CHANGED
@@ -1,20 +1,21 @@
  from transformers import AutoTokenizer, AutoModelForSeq2SeqLM
  import gradio as gr
+
  checkpoint = "Mr-Vicky-01/English-Tamil-Translator"
  tokenizer = AutoTokenizer.from_pretrained(checkpoint)
- model = AutoModelForSeq2SeqLM.from_pretrained(r"Finetuned_model/")
+ model = AutoModelForSeq2SeqLM.from_pretrained(checkpoint)

  def language_translator(text):
      tokenized = tokenizer([text], return_tensors='pt')
      out = model.generate(**tokenized, max_length=128)
      return tokenizer.decode(out[0],skip_special_tokens=True)

- # examples = [
- # ["hello everyone"]
- # ["hardwork never fails."],
- # ["A room without books is like a body without a soul."],
- # ["The Sun is approximately 4.6 billion years older than Earth."],
- # ]
+ examples = [
+ ["hello everyone"]
+ ["hardwork never fails."],
+ ["A room without books is like a body without a soul."],
+ ["The Sun is approximately 4.6 billion years older than Earth."],
+ ]

- demo = gr.Interface(fn=language_translator, inputs='text',outputs='text',title='English To Tamil Translator') #examples=examples)
+ demo = gr.Interface(fn=language_translator, inputs='text',outputs='text',title='English To Tamil Translator', examples=examples)
  demo.launch(debug=True,share=True)
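For reference, a minimal sketch of how the updated app.py would run end to end. One caveat: the examples list as committed omits the comma after the first entry, so Python parses ["hello everyone"] ["hardwork never fails."] as an indexing expression and raises a TypeError when the script runs; the sketch below assumes that comma is present.

# Sketch of the updated app.py with the examples list corrected
# (comma added after the first entry; everything else mirrors the commit).
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM
import gradio as gr

# Tokenizer and model now both come from the Hub checkpoint rather than a local
# "Finetuned_model/" directory, so the Space no longer depends on bundled weights.
checkpoint = "Mr-Vicky-01/English-Tamil-Translator"
tokenizer = AutoTokenizer.from_pretrained(checkpoint)
model = AutoModelForSeq2SeqLM.from_pretrained(checkpoint)

def language_translator(text):
    # Tokenize the English input, generate up to 128 output tokens, decode to Tamil text.
    tokenized = tokenizer([text], return_tensors='pt')
    out = model.generate(**tokenized, max_length=128)
    return tokenizer.decode(out[0], skip_special_tokens=True)

examples = [
    ["hello everyone"],  # assumed comma; the committed file omits it
    ["hardwork never fails."],
    ["A room without books is like a body without a soul."],
    ["The Sun is approximately 4.6 billion years older than Earth."],
]

demo = gr.Interface(
    fn=language_translator,
    inputs='text',
    outputs='text',
    title='English To Tamil Translator',
    examples=examples,
)
demo.launch(debug=True, share=True)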