"""Gradio front-end for the Qwen2.5-72B-Instruct model on the Hugging Face
Inference API: type a prompt, get the model's generated text back."""

import os

import gradio as gr
import requests

# Hugging Face Inference API endpoint for the hosted model.
api_url = "https://api-inference.huggingface.co/models/Qwen/Qwen2.5-72B-Instruct"

# Bearer token read from the environment; the endpoint rejects
# unauthenticated requests. NOTE(review): the env-var name 'HF_Token' is
# unconventional casing (usually HF_TOKEN) — kept as-is so existing
# deployments keep working.
headers = {"Authorization": f"Bearer {os.getenv('HF_Token')}"}


def query(prompt):
    """Send *prompt* to the hosted model and return its generated text.

    Never raises: any failure (network error, non-200 status, unexpected
    payload shape) is returned as a human-readable string so the Gradio UI
    always has something to display.
    """
    try:
        # Explicit timeout so a stalled request can't hang the UI forever.
        response = requests.post(
            api_url,
            headers=headers,
            json={"inputs": prompt},
            timeout=60,
        )
    except requests.RequestException as exc:
        return f"Request failed: {exc}"

    if response.status_code != 200:
        return f"Error {response.status_code}: {response.text}"

    try:
        # A successful response is a list of {"generated_text": ...} dicts.
        return response.json()[0]["generated_text"]
    except (ValueError, KeyError, IndexError, TypeError):
        # The API can also answer 200 with a non-list payload (e.g. a
        # "model is loading" notice); surface it instead of crashing.
        return f"Unexpected response format: {response.text}"


# Minimal text-in / text-out UI wired to query().
demo = gr.Interface(
    fn=query,
    inputs="text",
    outputs="text",
    title="Qwen-2.5 72B Interaction",
    description="Ask complex mathematical or pattern-related questions and get responses from Qwen-2.5 72B.",
)

# Guard the launch so importing this module doesn't start a server.
if __name__ == "__main__":
    demo.launch()