# from fastapi import FastAPI
# from fastapi.staticfiles import StaticFiles
# from fastapi.responses import FileResponse
# from transformers import pipeline
# app = FastAPI()
# pipe_flan = pipeline("text2text-generation", model="FreedomIntelligence/Apollo-2B",trust_remote_code=True)
# @app.get("/infer_t5")
# def t5(input):
#     output = pipe_flan(input)
#     return {"output": output[0]["generated_text"]}
# app.mount("/", StaticFiles(directory="static", html=True), name="static")
# @app.get("/")
# def index() -> FileResponse:
#     return FileResponse(path="/app/static/index.html", media_type="text/html")
from fastapi import FastAPI
from fastapi.staticfiles import StaticFiles
from fastapi.responses import FileResponse
from transformers import pipeline
app = FastAPI()
# Change the pipeline type to text-generation and use an example model (e.g. GPT-2)
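# Note: trust_remote_code=True allows the pipeline to execute custom modeling code shipped
# with the FreedomIntelligence/Apollo-2B repository on the Hugging Face Hub; only enable it
# for repositories you trust.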
pipe_gpt = pipeline("text-generation", model="FreedomIntelligence/Apollo-2B", trust_remote_code=True)
@app.get("/infer_t5")
def infer_gpt(input: str):
    output = pipe_gpt(input, max_length=500)  # adjust max_length to control the length of the generated text
    return {"output": output[0]["generated_text"]}
app.mount("/", StaticFiles(directory="static", html=True), name="static")
@app.get("/")
def index() -> FileResponse:
    return FileResponse(path="static/index.html", media_type="text/html")
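
# A minimal usage sketch, assuming the app is started as `uvicorn app:app --host 0.0.0.0 --port 7860`
# (the module name `app` and port 7860 are assumptions, matching a typical Docker Space setup):
#   curl "http://localhost:7860/infer_t5?input=Hello"
#   -> {"output": "..."}
#   curl "http://localhost:7860/"  # serves static/index.html via the StaticFiles mount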