import gradio as gr
import requests

# Hosted Hugging Face Inference API endpoints: Falcon-7B-Instruct for text
# generation and a Longformer model fine-tuned on SQuAD v1 for extractive QA.
API_URL = "https://api-inference.huggingface.co/models/tiiuae/falcon-7b-instruct"
headers = {"Authorization": "Bearer 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"}
API_URL2 = "https://api-inference.huggingface.co/models/valhalla/longformer-base-4096-finetuned-squadv1"
headers2 = {"Authorization": "Bearer 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"}


def query(payload):
    """Ask Falcon-7B-Instruct to describe the context of a question (text generation)."""
    prompt = "what's the context of the question " + payload + " :"
    # The Inference API expects the prompt under the "inputs" key.
    response = requests.post(API_URL, headers=headers, json={"inputs": prompt})
    return response.json()


def query2(q, c):
    """Run extractive question answering with the Longformer SQuAD model."""
    payload = {"inputs": {"question": q, "context": c}}
    response = requests.post(API_URL2, headers=headers2, json=payload)
    result = response.json()
    # The QA endpoint returns {"score", "start", "end", "answer"}; fall back to
    # the raw response if the model is still loading or an error came back.
    return result.get("answer", str(result)) if isinstance(result, dict) else str(result)


iface = gr.Interface(
    fn=query2,
    inputs=[gr.Textbox(label="question"), gr.Textbox(label="context")],
    outputs=gr.Textbox(label="answer"),
    title="AI Interface",
    description="Ask the AI model anything!",
)

iface.launch()
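
# A minimal smoke test (a sketch, assuming the hosted models are warm and the
# token above is still valid): call query2 directly instead of going through
# the Gradio UI. The question/context strings below are just illustrative.
# Uncomment and run before iface.launch() to verify the endpoint responds.
#
# print(query2(
#     "Where is the Eiffel Tower located?",
#     "The Eiffel Tower is a wrought-iron lattice tower in Paris, France.",
# ))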