Spaces: Runtime error
```python
import gradio as gr
from transformers import AutoModelForCausalLM, AutoTokenizer

# Specify the hosted model repository or URL
model_repo = "Tanvi03/ReidLM"  # Replace with the actual model repository or URL

# Load the tokenizer and model from the hosted repository
tokenizer = AutoTokenizer.from_pretrained(model_repo)
model = AutoModelForCausalLM.from_pretrained(model_repo)

# Define the function to handle the chat interaction
def chat(message):
    input_ids = tokenizer.encode(message, return_tensors="pt")
    output = model.generate(input_ids, max_length=100, num_return_sequences=1)
    response = tokenizer.decode(output[0], skip_special_tokens=True)
    return response

# Create a Gradio interface
iface = gr.Interface(
    fn=chat,
    inputs=gr.Textbox(placeholder="Enter your message..."),
    outputs=gr.Textbox(placeholder="Model's response will appear here..."),
    title="Chat with Hosted Model",
)

# Launch the Gradio app
iface.launch()
```
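
The "Runtime error" badge on a Space usually means the container crashed at startup; with a script like this, two common causes are a missing dependency (gradio, transformers, and torch all need to be listed in the Space's requirements.txt) and running out of memory while loading a full-precision causal LM on the default CPU hardware. Below is a minimal sketch of a lower-memory load and a slightly more robust generate call, assuming the same Tanvi03/ReidLM repository. `low_cpu_mem_usage` and `max_new_tokens` are standard transformers options; whether memory is actually the cause of this particular crash is an assumption, so check the Space's Logs tab for the real traceback first.

```python
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

model_repo = "Tanvi03/ReidLM"  # same repo as in the app above

tokenizer = AutoTokenizer.from_pretrained(model_repo)

# low_cpu_mem_usage streams weights into the model instead of building a
# full random-initialized copy first, which lowers peak RAM during load.
# (On GPU hardware you could additionally pass torch_dtype=torch.float16.)
model = AutoModelForCausalLM.from_pretrained(
    model_repo,
    low_cpu_mem_usage=True,
)
model.eval()

def chat(message):
    # Calling the tokenizer directly returns input_ids AND attention_mask,
    # which silences the "attention mask not set" warning from generate().
    inputs = tokenizer(message, return_tensors="pt")
    with torch.no_grad():  # inference only; skip gradient bookkeeping
        output = model.generate(
            **inputs,
            max_new_tokens=100,      # cap generated tokens, not total length
            num_return_sequences=1,
        )
    return tokenizer.decode(output[0], skip_special_tokens=True)
```

If the logs show a `ModuleNotFoundError` instead of an out-of-memory kill, the fix is the requirements.txt entry rather than anything in the code.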