ggbetz committed
Commit 25b3351 (1 parent: d80541a)

up hf inference interface

Files changed (1): app.py (+14 -10)
app.py CHANGED
@@ -1,16 +1,17 @@
 # Demo for T5 trained on multi-angular AAAC
 
+import json
 import textwrap
 import re
 
 import streamlit as st
 from spacy import displacy
 import graphviz
+import requests
 import seaborn as sns
 
 import aaac_util as aaac
 
-from huggingface_hub.inference_api import InferenceApi
 
 
 INTRO_TEXT = """This app let's you explore ArgumentAnalyst, a system for
@@ -85,15 +86,18 @@ def params(config):
 
 
 def build_inference_api():
-    inference = InferenceApi(
-        repo_id="debatelab/argument-analyst",
-        # token=st.secrets['api_token'] # for free without token, but faster with token
-    )
-    return inference
-    #config.max_answer = 450
-    #config.max_seq_len = 450
-    #config.no_repeat_ngram_size = 0
-
+    """HF inference api"""
+    API_URL = "https://api-inference.huggingface.co/models/debatelab/argument-analyst"
+    headers = {}  # {"Authorization": f"Bearer {st.secrets['api_token']}"}
+
+    def query(inputs: str, parameters):
+        payload = {"inputs": inputs}
+        payload.update(parameters)
+        data = json.dumps(payload)
+        response = requests.request("POST", API_URL, headers=headers, data=data)
+        return json.loads(response.content.decode("utf-8"))
+
+    return query
 
 
 @st.cache(allow_output_mutation=True)
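
For reference, a minimal usage sketch of the new requests-based interface (not part of this commit): build_inference_api() now returns a query closure that POSTs a JSON payload to the hosted Inference API instead of going through InferenceApi. The example input and the generation parameters below are illustrative assumptions, not values taken from app.py; only max_length=450 echoes the old commented-out config.

    query = build_inference_api()

    result = query(
        inputs="Peter owns a dog. Every dog owner loves animals. Therefore, Peter loves animals.",  # illustrative input
        parameters={
            "parameters": {"max_length": 450},    # generation settings; concrete values are assumptions
            "options": {"wait_for_model": True},  # wait while the model loads instead of getting a 503
        },
    )

    # The hosted API typically returns [{"generated_text": "..."}] on success
    # or an {"error": "..."} dict on failure; callers should handle both.
    print(result)

Because query() merges the parameters dict into the top-level payload, the caller supplies the standard "parameters" / "options" sections of the Inference API payload itself rather than a flat keyword list.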