imperialwool committed on
Commit 9cb7ee7
1 Parent(s): cd2b254

Update gradio_app.py

Files changed (1)
  1. gradio_app.py +4 -1
gradio_app.py CHANGED
@@ -13,7 +13,9 @@ translator_tokenizer = M2M100Tokenizer.from_pretrained( # tokenizer f
 translator_model = M2M100ForConditionalGeneration.from_pretrained( # translator model
     "facebook/m2m100_1.2B", cache_dir="translator/"
 )
+print("! SETTING MODEL IN EVALUATION MODE !")
 translator_model.eval()
+print("! DONE !")
 
 # Preparing things to work
 translator_tokenizer.src_lang = "en"
@@ -65,7 +67,7 @@ def generate_answer(request: str, max_tokens: int = 256, language: str = "en", c
         print(e)
         return "Oops! Internal server error. Check the logs of space/instance."
 
-
+print("! LOAD GRADIO INTERFACE !")
 demo = gr.Interface(
     fn=generate_answer,
     inputs=[
@@ -79,4 +81,5 @@ demo = gr.Interface(
     description=desc
 ).queue()
 if __name__ == "__main__":
+    print("! LAUNCHING GRADIO !")
     demo.launch()
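
For context, a minimal sketch of how the pieces touched by this commit could fit together in gradio_app.py. The generate_answer body, the Gradio input widgets, and the desc string are truncated or absent in the diff, so those parts below are illustrative assumptions, not the app's actual code.

import gradio as gr
from transformers import M2M100ForConditionalGeneration, M2M100Tokenizer

# Load the M2M100 tokenizer and translator model into a local cache directory
translator_tokenizer = M2M100Tokenizer.from_pretrained(
    "facebook/m2m100_1.2B", cache_dir="translator/"
)
translator_model = M2M100ForConditionalGeneration.from_pretrained(
    "facebook/m2m100_1.2B", cache_dir="translator/"
)
print("! SETTING MODEL IN EVALUATION MODE !")
translator_model.eval()  # inference only: disables dropout and similar training behavior
print("! DONE !")

# Preparing things to work
translator_tokenizer.src_lang = "en"

def generate_answer(request: str, max_tokens: int = 256, language: str = "en"):
    # Illustrative body: translate the English request into the target language.
    # The real function has more parameters (the diff truncates its signature)
    # and presumably generates an answer before translating it.
    try:
        encoded = translator_tokenizer(request, return_tensors="pt")
        tokens = translator_model.generate(
            **encoded,
            max_new_tokens=max_tokens,
            forced_bos_token_id=translator_tokenizer.get_lang_id(language),
        )
        return translator_tokenizer.batch_decode(tokens, skip_special_tokens=True)[0]
    except Exception as e:
        print(e)
        return "Oops! Internal server error. Check the logs of space/instance."

desc = "Demo description (placeholder; the real desc string is not shown in the diff)."

print("! LOAD GRADIO INTERFACE !")
demo = gr.Interface(
    fn=generate_answer,
    inputs=[
        gr.Textbox(label="Request"),  # assumed widgets; the diff truncates this list
        gr.Slider(1, 512, value=256, label="Max tokens"),
        gr.Textbox(label="Language", value="en"),
    ],
    outputs=gr.Textbox(label="Answer"),
    description=desc,
).queue()

if __name__ == "__main__":
    print("! LAUNCHING GRADIO !")
    demo.launch()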