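# AI vs Human Text Detector: Gradio web app that scores pasted text with a
# DistilBERT sequence classifier.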
import gradio as gr
import torch
from transformers import AutoTokenizer, AutoModelForSequenceClassification

# ================= LOAD MODEL =================
MODEL_NAME = "distilbert-base-multilingual-cased"

device = torch.device("cuda" if torch.cuda.is_available() else "cpu")

tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
model = AutoModelForSequenceClassification.from_pretrained(
    MODEL_NAME,
    num_labels=2
)
model.to(device)
model.eval()
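
# NOTE: the base distilbert-base-multilingual-cased checkpoint ships without a
# sequence-classification head, so the 2-label head created above is randomly
# initialized. Fine-tune it (or point MODEL_NAME at a fine-tuned detector
# checkpoint) before treating the probabilities as meaningful.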

# ================= AI FUNCTION =================
def classify_text(text):
    if not text or text.strip() == "":
        return "⚠️ Please enter some text"
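
    # Tokenize the input; truncation=True keeps it within the model's 512-token limit.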
    inputs = tokenizer(
        text,
        return_tensors="pt",
        truncation=True,
        padding=True
    ).to(device)

    with torch.no_grad():
        outputs = model(**inputs)
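
    # Softmax turns the two logits into probabilities; the app treats index 0 as
    # "human" and index 1 as "AI-generated" (a labeling convention of this app).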
    probs = torch.softmax(outputs.logits, dim=1)[0]
    human = probs[0].item() * 100
    ai = probs[1].item() * 100

    return f"""
✅ Human Probability: {human:.2f} %
🤖 AI Probability: {ai:.2f} %
"""

# ================= UI =================
with gr.Blocks(theme=gr.themes.Soft()) as app:
    gr.Markdown(
        "<h1 style='text-align:center;'>AI vs Human Text Detector</h1>"
    )
    gr.Markdown(
        "<p style='text-align:center;'>Professional NLP Web Application</p>"
    )

    with gr.Tab("📝 Text Input"):
        text_input = gr.Textbox(
            lines=8,
            placeholder="Paste text here..."
        )
        analyze_btn = gr.Button("Analyze Text")
        result = gr.Textbox(label="Result")
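
        # Clicking the button runs classify_text on the textbox contents and
        # writes the returned string into the Result box.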
        analyze_btn.click(classify_text, text_input, result)

    gr.Markdown(
        "<p style='text-align:center; font-size:12px;'>"
        "Created by Manar | AI Project 2025</p>"
    )

app.launch()