"""Blenderbot chat demo: generates an English reply, translates it to
Arabic, and serves the accumulated transcript through a Gradio UI."""

import gradio as gr
from transformers import (
    BlenderbotForConditionalGeneration,
    BlenderbotTokenizer,
    MarianMTModel,
    MarianTokenizer,
)

# Conversational model (produces English responses).
model_name = 'facebook/blenderbot-400M-distill'
model = BlenderbotForConditionalGeneration.from_pretrained(model_name)
tokenizer = BlenderbotTokenizer.from_pretrained(model_name)

# English -> Arabic translation model.
translation_model_name = 'Helsinki-NLP/opus-mt-en-ar'
translation_model = MarianMTModel.from_pretrained(translation_model_name)
translation_tokenizer = MarianTokenizer.from_pretrained(translation_model_name)

# Running transcript of the conversation (user turns in English,
# bot turns in Arabic). Grows for the lifetime of the process.
chat_history = []


def chatbot(input_text):
    """Reply to *input_text* and return the full transcript.

    The English reply from Blenderbot is translated to Arabic before
    being appended to the global ``chat_history``; the function returns
    every turn so far joined with newlines.

    Args:
        input_text: The user's message (English).

    Returns:
        str: The accumulated transcript, one turn per line.
    """
    global chat_history
    chat_history.append(input_text)

    # Generate an English response from Blenderbot.
    inputs = tokenizer([input_text], return_tensors='pt')
    reply_ids = model.generate(**inputs)
    response = tokenizer.decode(reply_ids[0], skip_special_tokens=True)

    # Translate the English response to Arabic.
    translation_inputs = translation_tokenizer(
        [response], return_tensors='pt', padding=True
    )
    translation_outputs = translation_model.generate(**translation_inputs)
    translated_response = translation_tokenizer.decode(
        translation_outputs[0], skip_special_tokens=True
    )

    chat_history.append(translated_response)
    return "\n".join(chat_history)


# NOTE: the gr.inputs.* namespace was removed in Gradio 3.x;
# components are now constructed directly (gr.Textbox).
iface = gr.Interface(
    fn=chatbot,
    inputs=gr.Textbox(lines=2, placeholder='Type here...'),
    outputs="text",
)
iface.launch()