huangjinghua committed on
Commit f667279
1 Parent(s): 3636d87
Files changed (1)
  1. app.py +4 -5
app.py CHANGED
@@ -2,16 +2,15 @@
import gradio as gr
# Load model directly
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM
+ tokenizer = AutoTokenizer.from_pretrained("facebook/m2m100_418M")
+ model = AutoModelForSeq2SeqLM.from_pretrained("facebook/m2m100_418M")


- def chineseToOther(name, lang = "en"):
-     tokenizer = AutoTokenizer.from_pretrained("facebook/m2m100_418M")
-     model = AutoModelForSeq2SeqLM.from_pretrained("facebook/m2m100_418M")
+ def chineseToOther(chinese_text, lang = "en"):
    okenizer.src_lang = "zh"
    encoded_zh = tokenizer(chinese_text, return_tensors="pt")
    generated_tokens = model.generate(**encoded_zh, forced_bos_token_id=tokenizer.get_lang_id(lang))
    translated_text = tokenizer.batch_decode(generated_tokens, skip_special_tokens=True)
    return translated_text

- demo = gr.Interface(fn=chineseToOther, inputs="text", outputs="text")
- demo.launch()
+ gr.ChatInterface(chineseToOther).launch()
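
Note on the committed code, with a hedged corrected sketch: the unchanged context line `okenizer.src_lang = "zh"` references an undefined name (presumably `tokenizer`), `gr.ChatInterface` calls its function as `fn(message, history)`, so the second positional argument is the chat history rather than `lang`, and `batch_decode` returns a list rather than a string. A minimal runnable version of the new app.py under those assumptions, keeping the original hard-coded `"en"` target:

import gradio as gr
# Load model directly
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM

tokenizer = AutoTokenizer.from_pretrained("facebook/m2m100_418M")
model = AutoModelForSeq2SeqLM.from_pretrained("facebook/m2m100_418M")


def chineseToOther(chinese_text, history, lang="en"):
    # gr.ChatInterface passes (message, history); history is unused here
    tokenizer.src_lang = "zh"  # fixes the "okenizer" typo in the commit
    encoded_zh = tokenizer(chinese_text, return_tensors="pt")
    generated_tokens = model.generate(
        **encoded_zh, forced_bos_token_id=tokenizer.get_lang_id(lang)
    )
    # batch_decode returns a list of strings; return the single translation
    translated_text = tokenizer.batch_decode(generated_tokens, skip_special_tokens=True)
    return translated_text[0]


gr.ChatInterface(chineseToOther).launch()

If the target language should be user-selectable, recent Gradio versions let ChatInterface pass extra widgets to the function via its additional_inputs argument instead of relying on the default value of lang.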