Update app.py
app.py CHANGED
@@ -7,12 +7,12 @@ headers = {"Authorization": "Bearer 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"}
 API_URL2 = "https://api-inference.huggingface.co/models/valhalla/longformer-base-4096-finetuned-squadv1"
 headers2 = {"Authorization": "Bearer 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"}
 
-def query(
-
-    response = requests.post(API_URL, headers=headers, json=
+def query(question):
+    payload = {"question": "what is the context of "+question+" : "}
+    response = requests.post(API_URL, headers=headers, json=payload)
     return response.json()["answer"]
 
-def query2(question, context):
+def query2(question, context):
     payload = {"question": question, "context": context}
     response = requests.post(API_URL2, headers=headers2, json=payload)
     return response.json()["answer"]
@@ -21,9 +21,10 @@ iface = gr.Interface(
     fn=query2,
     inputs=[gr.Textbox("question"), gr.Textbox("context")],
     outputs=gr.Textbox("answer"),
-    title="AI Interface",
+    title="AI Interface 2",
     description="Ask the AI model anything!",
 )
+
 iface2 = gr.Interface(
     fn=query,
     inputs=[gr.Textbox("question")],
@@ -32,5 +33,4 @@ iface2 = gr.Interface(
     description="Ask the AI model anything!",
 )
 
-
-iface2.launch()
+gr.launch_in_debugger([iface, iface2])
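For reference, below is a minimal sketch of a runnable app.py along the lines of this commit, not the author's exact file. Several pieces are assumptions: the value of API_URL sits above the diff context, so a placeholder question-answering model is used; the token is a placeholder; gr.TabbedInterface with a single launch() is swapped in for gr.launch_in_debugger, which I cannot find in Gradio's public API; and the question/context fields are wrapped in an "inputs" object, the payload format the serverless Inference API documents for question answering.

# Sketch of a working variant of app.py (assumptions noted above and in comments).
import requests
import gradio as gr

API_URL = "https://api-inference.huggingface.co/models/deepset/roberta-base-squad2"  # assumed; not visible in the diff
API_URL2 = "https://api-inference.huggingface.co/models/valhalla/longformer-base-4096-finetuned-squadv1"
headers = {"Authorization": "Bearer hf_xxx"}   # placeholder; keep real tokens out of the repo
headers2 = headers

def query(question):
    # The committed version sends only a question string; QA endpoints expect a
    # context as well, so here the question doubles as the context.
    payload = {"inputs": {"question": question, "context": question}}
    response = requests.post(API_URL, headers=headers, json=payload)
    return response.json()["answer"]

def query2(question, context):
    payload = {"inputs": {"question": question, "context": context}}
    response = requests.post(API_URL2, headers=headers2, json=payload)
    return response.json()["answer"]

iface = gr.Interface(
    fn=query2,
    # The committed code passes "question"/"context" positionally, which Gradio
    # treats as default values; label= is used here instead.
    inputs=[gr.Textbox(label="question"), gr.Textbox(label="context")],
    outputs=gr.Textbox(label="answer"),
    title="AI Interface 2",
    description="Ask the AI model anything!",
)

iface2 = gr.Interface(
    fn=query,
    inputs=[gr.Textbox(label="question")],
    outputs=gr.Textbox(label="answer"),
    title="AI Interface",
    description="Ask the AI model anything!",
)

# TabbedInterface serves both interfaces from the single launch() a Space expects.
gr.TabbedInterface([iface, iface2], ["With context", "Question only"]).launch()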