# NOTE(review): the lines "Spaces:" / "Build error" above the code were
# Hugging Face Spaces build-log residue pasted into the file, not Python code.
from fastapi import FastAPI, UploadFile, File, HTTPException
from fastapi.responses import PlainTextResponse
from fastapi.responses import JSONResponse
from fastapi.middleware.cors import CORSMiddleware
from fastapi import Request
import os
from dotenv import load_dotenv
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM
from huggingface_hub import login
# FastAPI application instance for the oncology T5 service.
app = FastAPI()

# Wide-open CORS so any front-end origin can call the API.
# NOTE(review): `allow_origins=["*"]` combined with `allow_credentials=True`
# is effectively ignored by browsers for credentialed requests (the CORS spec
# forbids wildcard origins with credentials) — list explicit origins if
# credentialed access is actually needed.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
# Load the Hugging Face token from the .env file; fail fast if it is absent
# so the model download below does not fail with a less obvious auth error.
load_dotenv()
hf_token = os.getenv("HF_TOKEN")
if not hf_token:
    raise ValueError("Token do Hugging Face não encontrado. Adicione HF_TOKEN no arquivo .env")
# Authenticate with the Hugging Face Hub (optional if the env var is already set).
login(token=hf_token)
# Model repository on the Hugging Face Hub.
model_name = "NullisTerminis/oncology_t5"

# Load the tokenizer and seq2seq model, passing the token loaded above
# (needed if the repository is private or gated).
tokenizer = AutoTokenizer.from_pretrained(model_name, token=hf_token)
model = AutoModelForSeq2SeqLM.from_pretrained(model_name, token=hf_token)
# Quick manual smoke test (commented out):
# input_text = input(str("Texto:"))
# inputs = tokenizer(input_text, return_tensors="pt", padding=True, truncation=True)
# output = model.generate(**inputs, max_length=128)
# decoded_output = tokenizer.decode(output[0], skip_special_tokens=True)
# print("Saída gerada pelo modelo:")
# print(decoded_output)
# ROUTES ////////////////////////////////////////////////////////////////////
@app.get("/")
async def root():
    """Root endpoint; returns a plain greeting string as a liveness check.

    NOTE(review): no route decorator survived in the corrupted source, but
    the "ROTAS" (routes) section marker shows these handlers were meant to
    be registered — "/" restored here; confirm the intended path.
    """
    return "Link start!"
@app.get("/mensagem")
async def mensagem():
    """Status endpoint returning a JSON confirmation that the API is up.

    NOTE(review): the route decorator was lost in the corrupted source; the
    path "/mensagem" is inferred from the function name — verify against the
    front-end callers before relying on it.
    """
    return {"mensagem": "API funcionando perfeitamente no Hugging Face!"}