feng2022 committed
Commit 3361691
1 Parent(s): 9140d4b

Update app.py

Files changed (1)
  1. app.py +20 -8
app.py CHANGED
@@ -39,6 +39,7 @@ TOKEN = "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"
 
 
 pipe = pipeline("translation", model="Helsinki-NLP/opus-mt-en-es")
+scores = []
 
 def parse_args() -> argparse.Namespace:
     parser = argparse.ArgumentParser()
@@ -69,7 +70,12 @@ def load_model(file_name: str, path:str,device: torch.device) -> nn.Module:
 
 def predict(text):
     return pipe(text)[0]["translation_text"]
-
+
+def track_score(score):
+    scores.append(score)
+    top_scores = sorted(scores, reverse=True)[:3]
+    return top_scores
+
 def main():
     #torch.cuda.init()
     #if torch.cuda.is_initialized():
@@ -93,11 +99,11 @@ def main():
     latent = torch.randn((1, 512), device=device)
     #img_out, _, _ = generator([latent])
     #imgs_arr = make_image(img_out)
-    iface = gr.Interface(
-        fn=predict,
-        inputs='text',
-        outputs='text',
-        examples=['result'],
+    #iface = gr.Interface(
+        #fn=predict,
+        #inputs='text',
+        #outputs='text',
+        #examples=['result'],
         #gr.outputs.Image(type='numpy', label='Output'),
         #title=TITLE,
         #description=DESCRIPTION,
@@ -105,13 +111,19 @@ def main():
         #theme=args.theme,
         #allow_flagging=args.allow_flagging,
         #live=args.live,
-        )
+        #)
 
-    iface.launch(
+    #iface.launch(
         #enable_queue=args.enable_queue,
         #server_port=args.port,
         #share=args.share,
+        #)
+    demo = gr.Interface(
+        track_score,
+        gr.Number(label="Score"),
+        gr.JSON(label="Top Scores")
         )
+    demo.launch()
 
 if __name__ == '__main__':
     main()
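
In effect, this commit retires the translation UI and points the Gradio app at a new score-tracking function: track_score appends each submitted number to a module-level scores list and returns the three highest values seen so far. A minimal standalone sketch of that new interface, assuming only that gradio is installed (the argparse/torch setup and the commented-out translation blocks in app.py are omitted here):

import gradio as gr

# Module-level history of submitted scores, shared across calls (as in app.py).
scores = []

def track_score(score):
    # Record the new score and return the three highest seen so far.
    scores.append(score)
    top_scores = sorted(scores, reverse=True)[:3]
    return top_scores

# A number goes in; a JSON list of the current top three scores comes out.
demo = gr.Interface(
    track_score,
    gr.Number(label="Score"),
    gr.JSON(label="Top Scores"),
)

if __name__ == "__main__":
    demo.launch()

Because scores lives at module level, the history is shared by every visitor of the Space and resets only when the app restarts.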
 
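The English-to-Spanish translation path (pipe and predict) stays in the file but is no longer wired to any interface once the iface block is commented out. For reference, a sketch of how that helper works on its own, assuming transformers (plus sentencepiece for the Marian tokenizer) is installed; the model is downloaded on first use:

from transformers import pipeline

# Helsinki-NLP MarianMT English -> Spanish model, as loaded in app.py.
pipe = pipeline("translation", model="Helsinki-NLP/opus-mt-en-es")

def predict(text):
    # The pipeline returns a list of dicts; keep only the translated string.
    return pipe(text)[0]["translation_text"]

print(predict("Good morning"))  # prints the Spanish translation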