import gradio as gr
from transformers import AutoTokenizer, AutoModelForSequenceClassification, pipeline
import torch
import torch.nn.functional as F
# Load the gender prediction model and tokenizer
model_name = "fc63/gender_prediction_model_from_text"
tokenizer = AutoTokenizer.from_pretrained(model_name, use_fast=False)
model = AutoModelForSequenceClassification.from_pretrained(model_name)
model.eval()
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
model.to(device)
# Translation pipeline (multilingual → English) for non-English inputs
translator = pipeline("translation", model="Helsinki-NLP/opus-mt-mul-en")
def predict(text, language):
    # Translate non-English input to English before classification
    if language == "Not English":
        try:
            translated = translator(text)[0]["translation_text"]
        except Exception as e:
            return f"Translation failed: {e}"
    else:
        translated = text
    # Model inference on the (possibly translated) text
    inputs = tokenizer(translated, return_tensors="pt", truncation=True, padding=True, max_length=128).to(device)
    with torch.no_grad():
        outputs = model(**inputs)
    probs = F.softmax(outputs.logits, dim=1)
    pred = torch.argmax(probs, dim=1).item()
    # Index 0 → Female, 1 → Male
    gender = "Female" if pred == 0 else "Male"
    confidence = round(probs[0][pred].item() * 100, 1)
    return f"{gender} (Confidence: {confidence}%)"
# Gradio interface
demo = gr.Interface(
    fn=predict,
    inputs=[
        gr.Textbox(label="Enter your text here", lines=4, placeholder="Type something..."),
        gr.Radio(["English", "Not English"], label="Text Language", value="English")
    ],
    outputs="text",
    title="Gender Prediction",
    description="Predicts the author's or speaker's gender from a text. Supports non-English inputs via automatic translation."
)

demo.launch()