import gradio as gr
import subprocess
import sys


def install_if_missing(package):
    """Import a package, installing it with pip first if it is missing."""
    try:
        __import__(package)
    except ImportError:
        subprocess.check_call([sys.executable, "-m", "pip", "install", package])


# Make sure pip itself is available and up to date before installing the model dependencies.
try:
    subprocess.check_call([sys.executable, "-m", "ensurepip", "--upgrade"])
    subprocess.check_call([sys.executable, "-m", "pip", "install", "--upgrade", "pip"])
except subprocess.CalledProcessError as e:
    print("Error upgrading pip:", e)

install_if_missing("transformers")
install_if_missing("torch")
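# transformers and torch are imported only after the install_if_missing calls above, so a
# fresh environment gets a chance to install them before the imports below run.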
from transformers import pipeline
import torch

model_name = "gpt2"
generator = pipeline("text-generation", model=model_name, device=0 if torch.cuda.is_available() else -1)
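# Note: in the transformers pipeline API, device=0 places the model on the first CUDA GPU
# and device=-1 keeps it on the CPU. The text-generation pipeline returns a list of dicts,
# each with a 'generated_text' key, which generate_with_otp unpacks below.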
# Over-Trust Penalty (OTP) parameters: currently placeholders that are not applied during
# generation below.
penalty_weight = 0.5
threshold = 0.8


def generate_with_otp(input_text):
    generated = generator(input_text, max_length=100, num_return_sequences=1)
    output = generated[0]['generated_text']

    # Crude heuristic flag: treat outputs with fewer than two sentences as potentially unreliable.
    if len(output) > 0 and output.count('.') < 2:
        output = f"Generated text might contain hallucinations: {output}"

    return output
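# Example call (hypothetical prompt), assuming the GPT-2 weights have downloaded successfully:
#   print(generate_with_otp("The Eiffel Tower is located in"))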
iface = gr.Interface(fn=generate_with_otp,
                     inputs="text",
                     outputs="text",
                     title="OPERA - Hallucination Mitigation using OTP",
                     description="This app generates text using GPT-2 with an Over-Trust Penalty (OTP) for hallucination mitigation.")

if __name__ == "__main__":
    iface.launch()