# --- Hugging Face Spaces page-scrape residue (kept for provenance) ---
# Spaces status: Sleeping / Sleeping — File size: 678 Bytes
# Commit hashes: b3be359 d8a0eb6 b3be359 78d4d36 d8a0eb6 b2b54d9 d8a0eb6 b3be359 d8a0eb6
# (the "1 2 3 ... 22" run was the web viewer's line-number gutter)
# Minimal FastAPI inference service: one text-generation endpoint backed by a
# Hugging Face pipeline, plus a static front-end (mounted further down).
from fastapi import FastAPI
from fastapi.staticfiles import StaticFiles
from fastapi.responses import FileResponse
from transformers import pipeline
app = FastAPI()
# Model weights are downloaded/loaded once at import time (slow first start).
# NOTE(review): despite the "/infer_t5" route name below, this is a Russian
# GPT-2-architecture model (rugpt3large), not T5.
pipe = pipeline("text-generation", model="ai-forever/rugpt3large_based_on_gpt2")
@app.get("/infer_t5")
def t5(input: str):
    """Generate a text continuation for ``input`` and return it as JSON.

    Query parameter:
        input: the prompt to continue.  The name shadows the ``input``
            builtin, but it is the public query-parameter name seen by
            clients, so it is kept for backward compatibility.

    Returns:
        ``{"output": <generated text>}`` — the first (only) generation.

    Note: despite the route/function name, the underlying model is a
    Russian GPT-2 variant, not T5.
    """
    # Truncate/pad long prompts so the model's context window (512 tokens)
    # is not exceeded; kwargs are forwarded to the pipeline's tokenizer.
    tokenizer_kwargs = {'padding': True, 'truncation': True,
                        'max_length': 512, 'return_tensors': 'pt'}
    output = pipe(input, **tokenizer_kwargs)
    # The pipeline returns a list of dicts; expose only the generated text.
    return {"output": output[0]["generated_text"]}
@app.get("/")
def index() -> FileResponse:
    """Serve the front-end entry page."""
    # NOTE(review): absolute path assumes the container puts the app under
    # /app (typical Spaces Docker layout) — confirm against the Dockerfile.
    return FileResponse(path="/app/static/index.html", media_type="text/html")


# Mounted LAST on purpose: Starlette matches routes in registration order,
# and a StaticFiles mount at "/" matches every path — in the original
# ordering it shadowed the "/" handler above, making it unreachable.
app.mount("/", StaticFiles(directory="static", html=True), name="static")