Stephan Arrington committed on
Commit b088909
1 Parent(s): dea0eab

changed stuff

Files changed (3):
  1. .gitignore +2 -0
  2. app.py +52 -32
  3. requirements.txt +3 -2
.gitignore ADDED
@@ -0,0 +1,2 @@
+ __pycache__/app.cpython-311.pyc
+ flagged/log.csv
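Note: the two new entries only ignore one specific CPython 3.11 bytecode file and one flagging log. A broader pair of patterns (a common alternative, not part of this commit) would cover any interpreter version and everything Gradio writes under the flagging directory:

# Ignore all compiled bytecode and all Gradio flagging output
__pycache__/
flagged/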
app.py CHANGED
@@ -1,45 +1,65 @@
import gradio as gr
-
- # Import the pipeline
from transformers import pipeline

- # Define the pipeline
- # Note: This pipeline is hosted on the Hugging Face model hub
- # https://huggingface.co/Helsinki-NLP/opus-mt-en-he
- # You can replace this with any other translation pipeline
- # https://huggingface.co/models?filter=translation
- pipe = pipeline("translation", model="Helsinki-NLP/opus-mt-en-he")
+ # Using the latest version of Gradio and Transformers
+ # We want to expand the interface to include a reverse translation
+ # We want to use the Helsinki-NLP/opus-mt-tc-big-he-en model for the reverse translation

- # Define a pipeline for reverse translation
- # Note: This pipeline is hosted on the Hugging Face model hub
- # https://huggingface.co/Helsinki-NLP/opus-mt-he-en
- # You can replace this with any other translation pipeline
- # https://huggingface.co/models?filter=translation
- pipe_reverse = pipeline("translation", model="Helsinki-NLP/opus-mt-he-en")
+ # A dropdown menu for selecting the model
+ model_names = ["Helsinki-NLP/opus-mt-en-he", "Helsinki-NLP/opus-mt-tc-big-he-en"]
+ model_name = gr.inputs.Dropdown(model_names, label="Model")
+ # Name the dropdown options
+ model_name.choices = ["English to Hebrew", "Hebrew to English"]

+ # An output text box displaying the translated text and reverse translated text
+ translation = gr.outputs.Textbox(label="Translation")
+ reverse_translation = gr.outputs.Textbox(label="Reverse Translation")

- # Define the function
- def predict(text):
-     # Return the translation
-     return pipe(text)[0]["translation_text"]
+ # A function for translating text
+ def translate(model_name, text):
+     # Create a pipeline for translating from English to Hebrew
+     pipe = pipeline("translation", model=model_name)

- def predict_reverse(text):
    # Return the translation
-     return pipe_reverse(text)[0]["translation_text"]
+     return pipe(text)[0]["translation_text"]

- # Define the interface
- iface = gr.Interface(
-     fn=predict,
-     fn_reverse=predict_reverse,
-     inputs='text',
-     outputs='text',
-     title="English to Hebrew Translator",
-     description="Translate English to Hebrew",
-     examples=[["Hello! My name is Bob."], ["I like to eat apples and banana"]]
- )
+ # Create an interface for translating text
+ from transformers import AutoTokenizer, AutoModelForSeq2SeqLM
+ import torch

+ tokenizer = AutoTokenizer.from_pretrained("Helsinki-NLP/opus-mt-en-he")
+ model = AutoModelForSeq2SeqLM.from_pretrained("Helsinki-NLP/opus-mt-en-he")

+ def translate(model_name, text):
+     # Create a pipeline for translating from English to Hebrew
+     #Console out the model name
+     print(model_name)
+     if model_name == "English to Hebrew":
+         forward_tokenizer = AutoTokenizer.from_pretrained("Helsinki-NLP/opus-mt-en-he")
+         forward_model = AutoModelForSeq2SeqLM.from_pretrained("Helsinki-NLP/opus-mt-en-he")
+         reverse_tokenizer = AutoTokenizer.from_pretrained("Helsinki-NLP/opus-mt-tc-big-he-en")
+         reverse_model = AutoModelForSeq2SeqLM.from_pretrained("Helsinki-NLP/opus-mt-tc-big-he-en")
+     elif model_name == "Hebrew to English":
+         forward_tokenizer = AutoTokenizer.from_pretrained("Helsinki-NLP/opus-mt-tc-big-he-en")
+         forward_model = AutoModelForSeq2SeqLM.from_pretrained("Helsinki-NLP/opus-mt-tc-big-he-en")
+         reverse_tokenizer = AutoTokenizer.from_pretrained("Helsinki-NLP/opus-mt-en-he")
+         reverse_model = AutoModelForSeq2SeqLM.from_pretrained("Helsinki-NLP/opus-mt-en-he")
+     else:
+         raise ValueError("Invalid model name")
+
+     # Forward translation
+     forward_input_ids = forward_tokenizer.encode(text, return_tensors="pt")
+     forward_outputs = forward_model.generate(forward_input_ids)
+     forward_translation = forward_tokenizer.decode(forward_outputs[0], skip_special_tokens=True)

- # Launch the interface
- iface.launch()
+     # Reverse translation
+     reverse_input_ids = reverse_tokenizer.encode(forward_translation, return_tensors="pt")
+     reverse_outputs = reverse_model.generate(reverse_input_ids)
+     reverse_translation = reverse_tokenizer.decode(reverse_outputs[0], skip_special_tokens=True)
+
+     return forward_translation, reverse_translation

+ iface = gr.Interface(fn=translate, inputs=[model_name, "text"], outputs=[translation, reverse_translation])
+
+ # Launch the interface
+ iface.launch(share=True)
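Note on the new app.py: as committed, it defines translate twice (the second definition shadows the first), leaves the module-level tokenizer and model unused, reloads both models on every request, and relies on the Gradio 3.x gr.inputs.Dropdown / gr.outputs.Textbox namespaces, which current Gradio releases no longer provide. Below is a minimal sketch of the same dropdown-plus-two-textbox app against the gr.Dropdown / gr.Textbox API; it reuses the commit's model names but is an illustrative rewrite, not the committed code:

import gradio as gr
from transformers import pipeline

# Forward/reverse model pair for each dropdown label (model names as in the commit)
MODEL_PAIRS = {
    "English to Hebrew": ("Helsinki-NLP/opus-mt-en-he", "Helsinki-NLP/opus-mt-tc-big-he-en"),
    "Hebrew to English": ("Helsinki-NLP/opus-mt-tc-big-he-en", "Helsinki-NLP/opus-mt-en-he"),
}

# Load each translation pipeline once at startup instead of on every request
PIPELINES = {name: pipeline("translation", model=name)
             for pair in MODEL_PAIRS.values() for name in pair}

def translate(direction, text):
    forward_name, reverse_name = MODEL_PAIRS[direction]
    # Translate forward, then translate the result back for a round-trip check
    forward_translation = PIPELINES[forward_name](text)[0]["translation_text"]
    reverse_translation = PIPELINES[reverse_name](forward_translation)[0]["translation_text"]
    return forward_translation, reverse_translation

iface = gr.Interface(
    fn=translate,
    inputs=[gr.Dropdown(list(MODEL_PAIRS), label="Model"), gr.Textbox(label="Text")],
    outputs=[gr.Textbox(label="Translation"), gr.Textbox(label="Reverse Translation")],
    title="English to Hebrew Translator",
)

iface.launch()

Caching the pipelines at startup avoids re-instantiating the tokenizers and models on every call, which the committed version does inside translate.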
requirements.txt CHANGED
@@ -1,3 +1,4 @@
gradio
- transformers==4.28.0
- torch
+ git+https://github.com/huggingface/transformers
+ torch
+ sentencepiece
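Note on requirements.txt: sentencepiece is needed by the tokenizers of the Helsinki-NLP OPUS-MT models, so that addition is required rather than optional. Installing transformers straight from the GitHub main branch, however, means every Space rebuild picks up whatever is on main at that moment. If the git dependency was only there to get a change that has since shipped in a release, a pinned file along these lines (version specifiers are illustrative, not from the commit) keeps builds reproducible:

# Illustrative pins, not from the commit; adjust to the versions the Space actually needs
gradio>=3.0
transformers>=4.30
torch
sentencepiece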