rajeshradhakrishnan committed on
Commit
596c31b
1 Parent(s): 3043eed

English-Malayalam Translate v14

Files changed (2)
  1. main.py +5 -14
  2. requirements.txt +3 -3
main.py CHANGED
@@ -3,27 +3,18 @@ from fastapi import FastAPI
 from fastapi.staticfiles import StaticFiles
 #from fastapi.responses import FileResponse
 from fastapi.templating import Jinja2Templates
-from transformers import MBartForConditionalGeneration, MBart50TokenizerFast
+from transformers import pipeline


-model = MBartForConditionalGeneration.from_pretrained("facebook/mbart-large-50-one-to-many-mmt")
-tokenizer = MBart50TokenizerFast.from_pretrained("facebook/mbart-large-50-one-to-many-mmt", src_lang="en_XX")
-
 app = FastAPI()
 templates = Jinja2Templates(directory="templates")

+pipe_flan = pipeline("translation_xx_to_en", model="google/flax-t5-base-ml-mono", tokenizer="google/mt5-base")
+
 @app.get("/infer_t5")
 def t5(input):
-    model_inputs = tokenizer(input, return_tensors="pt")
-
-    # translate from English to Malayalam
-    generated_tokens = model.generate(
-        **model_inputs,
-        forced_bos_token_id=tokenizer.lang_code_to_id["ml_IN"]
-    )
-
-    output = tokenizer.batch_decode(generated_tokens, skip_special_tokens=True)
-    return {"output":output}
+    output = pipe_flan(input)
+    return {"output": output[0]["generated_text"]}


 app.mount("/", StaticFiles(directory="static", html=True), name="static")
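For a quick end-to-end check of the updated endpoint, here is a minimal client sketch using the requests package already pinned in requirements.txt. The base URL is an assumption (uvicorn's default shown); point it at wherever the app is actually served.

import requests

# Assumed base URL; adjust to wherever uvicorn serves the app.
BASE_URL = "http://localhost:8000"

def translate(text: str) -> str:
    # GET /infer_t5 sends the text as the `input` query parameter,
    # matching the t5(input) handler in main.py.
    resp = requests.get(f"{BASE_URL}/infer_t5", params={"input": text})
    resp.raise_for_status()
    # The handler responds with {"output": <translated string>}.
    return resp.json()["output"]

if __name__ == "__main__":
    print(translate("How are you?"))

Note that transformers translation pipelines usually expose their result under the "translation_text" key rather than "generated_text", so the lookup in the new handler may need a follow-up adjustment.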
 
requirements.txt CHANGED
@@ -1,6 +1,6 @@
 fastapi==0.74.*
 requests==2.27.*
-torch
-transformers
-sentencepiece
+sentencepiece==0.1.*
+torch==1.11.*
+transformers==4.*
 uvicorn[standard]==0.17.*
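After installing the pinned requirements, a small sanity-check sketch (assuming the packages above are importable in the current environment) can confirm the resolved versions line up with the new constraints:

import fastapi
import sentencepiece
import torch
import transformers

# Print the versions that pip actually resolved for the pinned packages.
for mod in (fastapi, sentencepiece, torch, transformers):
    print(mod.__name__, mod.__version__)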