# Hugging Face Spaces page residue (status: Paused, Paused) — kept as a comment
# so the scraped header no longer breaks the Python file.
import os

import gradio as gr
import requests
# Hugging Face Inference Endpoint for the LLaMA-2-7B Guanaco Dolly Mini model.
API_URL = "https://z94ka3s1dsuof4va.us-east-1.aws.endpoints.huggingface.cloud"

# Read the API token from the environment instead of hard-coding it: the
# original committed a live-looking token (with a markdown-escaped `hf\_`
# prefix) directly into source, which is both a leak and a broken literal.
# Set HF_API_TOKEN before launching; empty string means "no auth header value".
API_TOKEN = os.environ.get("HF_API_TOKEN", "")
def query_huggingface_model(input_text):
    """Send *input_text* to the Hugging Face Inference Endpoint.

    Returns the decoded JSON response on HTTP 200, otherwise a dict of the
    form ``{"error": "..."}`` — including on network failures and timeouts,
    which the original version let propagate as uncaught exceptions.
    """
    headers = {"Authorization": f"Bearer {API_TOKEN}"}
    payload = {"inputs": input_text}
    try:
        # timeout added: without it requests.post can block forever if the
        # endpoint is unreachable or the (paused) Space never responds.
        response = requests.post(API_URL, headers=headers, json=payload, timeout=30)
    except requests.RequestException as exc:
        return {"error": f"Request failed: {exc}"}
    if response.status_code == 200:
        return response.json()
    return {"error": f"Request failed with status code {response.status_code}"}
def generate_response(input_text):
    """Query the model and return its generated text (or an error string).

    The endpoint is expected to return a list like
    ``[{"generated_text": "..."}]``; any other shape is reported as an
    error instead of crashing with IndexError/KeyError as before.
    """
    response = query_huggingface_model(input_text)
    if isinstance(response, dict) and "error" in response:
        return response["error"]
    try:
        return response[0]["generated_text"]
    except (IndexError, KeyError, TypeError):
        # Defensive: surface a readable message if the payload shape changes.
        return f"Unexpected response format: {response!r}"
# Build the Gradio UI: a two-line text box feeding generate_response,
# with plain-text output.
input_box = gr.Textbox(lines=2, placeholder="Enter your text here...")

iface = gr.Interface(
    fn=generate_response,
    inputs=input_box,
    outputs="text",
    title="LLaMA-2-7B Guanaco Dolly Mini Model",
    description="Generate responses using the LLaMA-2-7B Guanaco Dolly Mini model from Hugging Face.",
)

# Start the web app.
iface.launch()