import gradio as gr
from transformers import pipeline, BertForSequenceClassification, BertTokenizer

# Load the model and tokenizer from the Hugging Face Hub
model = BertForSequenceClassification.from_pretrained("KaranNag/Ai_text_model")
tokenizer = BertTokenizer.from_pretrained("KaranNag/Ai_text_model")

# Create a text-classification pipeline
text_classification = pipeline("text-classification", model=model, tokenizer=tokenizer)

# Define the Gradio interface function
def classify_text(text):
    result = text_classification(text)
    label = result[0]["label"]
    score = result[0]["score"]
    return f"Label: {label}, Score: {score:.4f}"

# Create the Gradio interface
interface = gr.Interface(
    fn=classify_text,
    inputs="text",
    outputs="text",
    title="Text Classification",
    description="Classify text as human-written or AI-generated",
)

if __name__ == "__main__":
    interface.launch()
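
# Quick sanity check (a sketch, not part of the app): calling classify_text directly
# returns the formatted prediction string. The actual label names depend on the
# id2label mapping in the model's config (e.g. "LABEL_0"/"LABEL_1" unless the model
# defines human/AI labels), so the output below is illustrative, not exact.
#
#   >>> classify_text("This passage was drafted by a large language model.")
#   'Label: LABEL_1, Score: 0.9876'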