import gradio as gr
import requests

# Hugging Face hosted Inference API endpoints for the two models.
API_URL = "https://api-inference.huggingface.co/models/tiiuae/falcon-7b-instruct"
API_URL2 = "https://api-inference.huggingface.co/models/valhalla/longformer-base-4096-finetuned-squadv1"

# Replace with your own Hugging Face access token (do not commit a real token to source control).
headers = {"Authorization": "Bearer hf_YOUR_TOKEN_HERE"}
headers2 = {"Authorization": "Bearer hf_YOUR_TOKEN_HERE"}


def query(question, context, api_url, headers):
    if api_url == API_URL:
        # Falcon-7B-Instruct is a text-generation model: the Inference API expects a
        # single "inputs" prompt string and returns a list of {"generated_text": ...}.
        payload = {"inputs": f"Context: {context}\nQuestion: {question}\nAnswer:"}
        response = requests.post(api_url, headers=headers, json=payload)
        return response.json()[0]["generated_text"]
    else:
        # The Longformer model is extractive question answering: the Inference API
        # expects a question/context pair under "inputs" and returns {"answer": ..., "score": ...}.
        payload = {"inputs": {"question": question, "context": context}}
        response = requests.post(api_url, headers=headers, json=payload)
        return response.json()["answer"]


model_1_interface = gr.Interface(
    fn=lambda question, context="": query(question, context, API_URL, headers),
    inputs=[gr.Textbox(label="question"), gr.Textbox(label="context")],
    outputs=gr.Textbox(label="answer"),
    title="Model 1 Interface",
    description="Ask the AI model anything!",
)

model_2_interface = gr.Interface(
    fn=lambda question, context="": query(question, context, API_URL2, headers2),
    inputs=[gr.Textbox(label="question"), gr.Textbox(label="context")],
    outputs=gr.Textbox(label="answer"),
    title="Model 2 Interface",
    description="Ask the AI model anything!",
)

# gr.Interface has no get_active() method, so the two apps cannot be swapped from a
# callback at runtime. Gradio's TabbedInterface serves both models in a single app
# and lets the user switch between them with tabs.
demo = gr.TabbedInterface(
    [model_1_interface, model_2_interface],
    ["Model 1", "Model 2"],
)

demo.launch()
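# A minimal smoke test of the query() helper (hypothetical usage, assuming a valid
# token; run it before demo.launch(), which blocks the process):
#
#   print(query(
#       "What is Gradio?",
#       "Gradio is a Python library for building machine learning demos.",
#       API_URL2,
#       headers2,
#   ))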