# NOTE: "Spaces: / Sleeping / Sleeping" — Hugging Face Spaces status lines
# captured by the page scrape; not part of the program.
# from fastapi import FastAPI, File, UploadFile | |
# from transformers import MarianMTModel, MarianTokenizer | |
# app = FastAPI() | |
# # Load the translation model and tokenizer | |
# model_name = "Helsinki-NLP/opus-mt-de-en" | |
# model = MarianMTModel.from_pretrained(model_name) | |
# tokenizer = MarianTokenizer.from_pretrained(model_name) | |
# @app.get("/") | |
# def read_root(): | |
# return {"message": "Welcome to the German to English Translation API!"} | |
# @app.post("/translate/") | |
# async def translate_text(text: str): | |
# # Perform translation | |
# input_text = f"translate German to English: {text}" | |
# # Tokenize input text | |
# input_ids = tokenizer.encode(input_text, return_tensors="pt") | |
# # Generate translation | |
# with torch.no_grad(): | |
# output_ids = model.generate(input_ids) | |
# # Decode the output | |
# translated_text = tokenizer.decode(output_ids[0], skip_special_tokens=True) | |
# return {"translated_text": translated_text} | |
import torch
from fastapi import FastAPI
from transformers import MarianMTModel, MarianTokenizer
app = FastAPI()

# Load the translation model and tokenizer once at import time so every
# request reuses the same in-memory instances instead of reloading them.
model_name = "Helsinki-NLP/opus-mt-de-en"  # German -> English MarianMT checkpoint
model = MarianMTModel.from_pretrained(model_name)
tokenizer = MarianTokenizer.from_pretrained(model_name)
@app.get("/")  # BUG FIX: the route decorator was missing, so "/" was never registered
def read_root():
    """Return a welcome message confirming the API is up."""
    return {"message": "Welcome to the German to English Translation API!"}
@app.post("/translate/")  # BUG FIX: decorator was missing, so the endpoint was never registered
async def translate_text(input_text: dict):
    """Translate German text to English.

    Expects a JSON body like ``{"text": "<German sentence>"}`` and returns
    ``{"translated_text": "<English sentence>"}``. A missing ``"text"`` key
    translates the empty string rather than raising.
    """
    # Extract the input text from the JSON payload; default to "" when absent.
    text = input_text.get("text", "")
    # BUG FIX: MarianMT (opus-mt-de-en) takes the raw German sentence directly.
    # The previous T5-style prefix "translate German to English: " was English
    # text fed to a de->en model and corrupted the output.
    input_ids = tokenizer.encode(text, return_tensors="pt")
    # Generate the translation without tracking gradients (inference only).
    with torch.no_grad():
        output_ids = model.generate(input_ids)
    # Decode the generated token IDs back into a plain string, dropping
    # special tokens such as <pad> and </s>.
    translated_text = tokenizer.decode(output_ids[0], skip_special_tokens=True)
    return {"translated_text": translated_text}