ggbetz committed on
Commit
be3c2ec
1 Parent(s): a724493

with token

Browse files
Files changed (1) hide show
  1. app.py +21 -25
app.py CHANGED
@@ -4,7 +4,6 @@ import json
4
  import textwrap
5
  import re
6
 
7
- #from transformers import pipeline
8
  import streamlit as st
9
  from spacy import displacy
10
  import graphviz
@@ -89,7 +88,7 @@ def params(config):
89
  def build_inference_api():
90
  """HF inference api"""
91
  API_URL = "https://api-inference.huggingface.co/models/debatelab/argument-analyst"
92
- headers = {}# {"Authorization": f"Bearer {st.secrets['api_token']}"}
93
 
94
  def query(inputs: str, parameters):
95
  payload = {
@@ -272,7 +271,6 @@ def run_model(mode_set, user_input):
272
 
273
 
274
  inference = build_inference_api()
275
- #t2t_pipeline = pipeline("text2text-generation", model=MODEL)
276
 
277
  current_input = user_input.copy()
278
  output = []
@@ -287,28 +285,26 @@ def run_model(mode_set, user_input):
287
  inquire_prompt = inquire_prompt + (f"{to_key}: {from_key}: {current_input[from_key]}")
288
  # inquire model
289
  inputs = inquire_prompt
290
- if True:
291
- attempts = 0
292
- out = None
293
- while not out and attempts<MAX_API_CALLS:
294
- attempts += 1
295
- try:
296
- # api call
297
- out = inference(inputs, INFERENCE_PARAMS)
298
- if not isinstance(out, list):
299
- raise ValueError('Response is not a list.')
300
- except Exception:
301
- if attempts < MAX_API_CALLS:
302
- st.warning(
303
- f"HF Inference API call (attempt {attempts} of {MAX_API_CALLS}) has failed. Response: {out}. Trying again..."
304
- )
305
- out = None
306
- else:
307
- st.warning(
308
- f"HF Inference API call (attempt {attempts} of {MAX_API_CALLS}) has failed. Response: {out}. Stopping."
309
- )
310
- return None
311
- #out = st.session_state.pipeline(inputs,**INFERENCE_PARAMS)
312
  out = out[0]['generated_text']
313
  # cleanup formalization
314
  if to_key in ['premises_formalized','conclusion_formalized']:
 
4
  import textwrap
5
  import re
6
 
 
7
  import streamlit as st
8
  from spacy import displacy
9
  import graphviz
 
88
  def build_inference_api():
89
  """HF inference api"""
90
  API_URL = "https://api-inference.huggingface.co/models/debatelab/argument-analyst"
91
+ headers = {"Authorization": f"Bearer {st.secrets['api_token']}"}
92
 
93
  def query(inputs: str, parameters):
94
  payload = {
 
271
 
272
 
273
  inference = build_inference_api()
 
274
 
275
  current_input = user_input.copy()
276
  output = []
 
285
  inquire_prompt = inquire_prompt + (f"{to_key}: {from_key}: {current_input[from_key]}")
286
  # inquire model
287
  inputs = inquire_prompt
288
+ attempts = 0
289
+ out = None
290
+ while not out and attempts<MAX_API_CALLS:
291
+ attempts += 1
292
+ try:
293
+ # api call
294
+ out = inference(inputs, INFERENCE_PARAMS)
295
+ if not isinstance(out, list):
296
+ raise ValueError('Response is not a list.')
297
+ except Exception:
298
+ if attempts < MAX_API_CALLS:
299
+ st.warning(
300
+ f"HF Inference API call (attempt {attempts} of {MAX_API_CALLS}) has failed. Response: {out}. Trying again..."
301
+ )
302
+ out = None
303
+ else:
304
+ st.warning(
305
+ f"HF Inference API call (attempt {attempts} of {MAX_API_CALLS}) has failed. Response: {out}. Stopping."
306
+ )
307
+ return None
 
 
308
  out = out[0]['generated_text']
309
  # cleanup formalization
310
  if to_key in ['premises_formalized','conclusion_formalized']: