import os

import gradio as gr
import requests

# Supply your Hugging Face API token via the HF_API_TOKEN environment variable
# rather than hard-coding a secret in the source.
HF_API_TOKEN = os.environ.get("HF_API_TOKEN", "")
headers = {"Authorization": f"Bearer {HF_API_TOKEN}"}

# Text-generation model used to produce a context passage for the question.
API_URL = "https://api-inference.huggingface.co/models/tiiuae/falcon-7b-instruct"

# Extractive question-answering model that reads the generated context.
API_URL2 = "https://api-inference.huggingface.co/models/valhalla/longformer-base-4096-finetuned-squadv1"


def query(payload):
    """Call the Falcon-7B-Instruct text-generation endpoint."""
    response = requests.post(API_URL, headers=headers, json=payload)
    return response.json()


def query2(payload):
    """Call the Longformer SQuAD question-answering endpoint."""
    response = requests.post(API_URL2, headers=headers, json=payload)
    return response.json()


def inference_ui(question):
    # Step 1: ask the generative model to write a context passage for the question.
    output = query({
        "inputs": f"context for '{question}' is:",
    })
    # The text-generation endpoint returns a list such as [{"generated_text": "..."}],
    # so extract the generated string before handing it to the QA model.
    context = output[0]["generated_text"]

    # Step 2: run extractive QA over the generated context.
    output2 = query2({
        "inputs": {
            "question": question,
            "context": context,
        },
    })
    # The QA endpoint returns a dict such as {"answer": "...", "score": ...};
    # return just the answer string for the text output component.
    return output2.get("answer", str(output2))


iface = gr.Interface(fn=inference_ui, inputs="text", outputs="text")
iface.launch()
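
# A minimal sketch of exercising the pipeline without the Gradio UI, assuming
# HF_API_TOKEN is set and both hosted models are loaded; the question below is
# only an illustration.
#
#   answer = inference_ui("What is the capital of France?")
#   print(answer)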