import gradio as gr
from transformers import AutoTokenizer, AutoModelForCausalLM

# Load the DialoGPT-medium model and tokenizer
tokenizer = AutoTokenizer.from_pretrained("microsoft/DialoGPT-medium")
model = AutoModelForCausalLM.from_pretrained("microsoft/DialoGPT-medium")

def chatbot(input_text):
    # Tokenize the input text, appending the end-of-sequence token
    input_ids = tokenizer.encode(input_text + tokenizer.eos_token, return_tensors="pt")

    # Generate a response (max_length counts the prompt tokens as well)
    response_ids = model.generate(input_ids, max_length=1000, pad_token_id=tokenizer.eos_token_id)

    # Decode only the newly generated tokens and return them
    response_text = tokenizer.decode(response_ids[:, input_ids.shape[-1]:][0], skip_special_tokens=True)
    return response_text

# Build the Gradio interface
iface = gr.Interface(
    fn=chatbot,
    inputs=gr.Textbox(lines=2, label="Input"),
    outputs="text",
    title="Chat with DialoGPT-medium",
)

iface.launch()
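
# --- Optional multi-turn variant (sketch) -------------------------------------
# The chatbot() above is stateless: every call sees only the latest message.
# Below is a minimal sketch, assuming the same model and tokenizer loaded above,
# of how conversation history could be carried across turns with gr.State by
# concatenating previous token IDs before each generation. The names
# chatbot_with_history and iface_multi are illustrative, not part of the
# original script. To try it, swap iface_multi.launch() in for iface.launch()
# above (code placed after a blocking launch() only runs once the server stops).
import torch

def chatbot_with_history(input_text, history_ids):
    # Tokenize the new user turn
    new_input_ids = tokenizer.encode(input_text + tokenizer.eos_token, return_tensors="pt")

    # Prepend the accumulated history, if any, so the model sees prior turns
    if history_ids is not None:
        bot_input_ids = torch.cat([history_ids, new_input_ids], dim=-1)
    else:
        bot_input_ids = new_input_ids

    # Generate; the returned IDs include history + prompt + new response
    history_ids = model.generate(bot_input_ids, max_length=1000, pad_token_id=tokenizer.eos_token_id)

    # Decode only the newly generated tokens, and return updated history as state
    response_text = tokenizer.decode(history_ids[:, bot_input_ids.shape[-1]:][0], skip_special_tokens=True)
    return response_text, history_ids

iface_multi = gr.Interface(
    fn=chatbot_with_history,
    inputs=[gr.Textbox(lines=2, label="Input"), gr.State()],
    outputs=["text", gr.State()],
    title="Chat with DialoGPT-medium (multi-turn)",
)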