ivanokhotnikov committed
Commit 69d6ea1 · 1 Parent(s): bf692b6

Create app.py

Files changed (1)
  1. app.py +40 -0
app.py ADDED
@@ -0,0 +1,40 @@
+import json
+import os
+import urllib.request
+
+import requests
+import streamlit as st
+from dotenv import load_dotenv
+from transformers import pipeline
+
+load_dotenv()
+
+TEST_METRICS = 'https://huggingface.co/nbroad/longformer-base-health-fact/raw/main/test_results.json'
+
+
+def query(payload):
+    API_URL = 'https://api-inference.huggingface.co/models/nbroad/longformer-base-health-fact'
+    headers = {'Authorization': f'Bearer {os.environ["API_TOKEN"]}'}
+    response = requests.post(API_URL, headers=headers, json=payload)
+    return response.json()
+
+
+st.title('Veracity of a claim with pretrained longformer (PUBHEALTH)')
+api = st.selectbox(label='Select inference API to use',
+                   options=(None, 'Pipeline API', 'HF Inference API'),
+                   index=0)
+if api:
+    st.header('Claim')
+    claim = st.text_area(label='Enter a claim text in the box')
+    if api == 'Pipeline API':
+        pl = pipeline('text-classification',
+                      model='nbroad/longformer-base-health-fact')
+        st.header('Results')
+        st.write(pl(claim))
+    elif api == 'HF Inference API':
+        st.header('Results')
+        st.write(query({'inputs': claim}))
+    st.header('Test metrics')
+    with urllib.request.urlopen(TEST_METRICS) as response:
+        metrics = json.load(response)
+    st.write(metrics)
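
For reference, the Inference API path in the new app.py can be exercised outside Streamlit. The sketch below is illustrative only and not part of the commit: it assumes API_TOKEN is set in the environment (or in a local .env file picked up by load_dotenv) and uses a made-up claim string; hosted text-classification endpoints typically return a nested list of label/score dicts per input, or an error dict while the model is still loading.

# Illustrative only, not part of the commit: call the hosted Inference API
# directly, assuming API_TOKEN is available via the environment or a .env file.
import json
import os

import requests
from dotenv import load_dotenv

load_dotenv()

API_URL = 'https://api-inference.huggingface.co/models/nbroad/longformer-base-health-fact'
headers = {'Authorization': f'Bearer {os.environ["API_TOKEN"]}'}

# Hypothetical claim text used only for this example.
payload = {'inputs': 'Vitamin C megadoses cure the common cold.'}
response = requests.post(API_URL, headers=headers, json=payload)

# Typical response shape: [[{'label': ..., 'score': ...}, ...]];
# a model that is still loading may instead return
# {'error': ..., 'estimated_time': ...}.
print(json.dumps(response.json(), indent=2))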