testing_ground / app.py
Saiyajino's picture
Create app.py
519371c
raw
history blame
715 Bytes
import gradio as gr
import torch
from transformers import AutoTokenizer, AutoModelForSequenceClassification, pipeline
# Select GPU when available; falls back to CPU otherwise.
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
# "roberta-large-openai-detector" is OpenAI's GPT-2 output detector
# (RoBERTa-large fine-tuned to classify machine- vs human-written text).
# NOTE(review): the un-namespaced model id resolves via Hub redirect to
# openai-community/roberta-large-openai-detector — confirm it still does.
tokenizer = AutoTokenizer.from_pretrained("roberta-large-openai-detector")
model = AutoModelForSequenceClassification.from_pretrained("roberta-large-openai-detector").to(device)
# Shared text-classification pipeline reused by predict() below.
pipe = pipeline("text-classification", model=model, tokenizer=tokenizer, device=device)
def predict(text):
    """Return the score of LABEL_1 for *text* from the detector pipeline.

    Runs the module-level text-classification pipeline on the input and
    returns the probability assigned to "LABEL_1".
    NOTE(review): for roberta-large-openai-detector LABEL_1 presumably
    means human-written ("real") — confirm against the model card before
    relying on the polarity.

    Raises:
        KeyError: if the model's labels are not LABEL_0/LABEL_1.
    """
    # return_all_scores=True yields a score entry for every label.
    # NOTE(review): this kwarg is deprecated in recent transformers
    # releases in favor of top_k=None; kept as-is for compatibility with
    # whatever version the Space pins.
    outputs = pipe(text, return_all_scores=True)[0]
    # Dict comprehension instead of dict([(k, v) for ...]) — same mapping,
    # no throwaway intermediate list.
    predictions = {entry["label"]: entry["score"] for entry in outputs}
    return predictions["LABEL_1"]
# Minimal Gradio UI: free-text input, single numeric output (the LABEL_1 score).
iface = gr.Interface(fn=predict, inputs="text", outputs="number")
# Blocks and serves the app (Spaces runtime keeps this process alive).
iface.launch()