awacke1 committed
Commit 2f082e6
Parent: 1780205

Update app.py

Files changed (1)
  1. app.py +3 -3
app.py CHANGED
@@ -46,16 +46,16 @@ st.set_page_config(page_title="GPT Streamlit Document Reasoner", layout="wide")
 # UI Controls
 should_save = st.sidebar.checkbox("💾 Save", value=True)
 
-# Functions
-
 # Function to add witty and humor buttons
 def add_witty_humor_buttons():
     with st.expander("Wit and Humor 🤣", expanded=True):
         button_description = "Write ten random adult limericks based on quotes that are tweet length and make you laugh 🎭"
         button_label = "Generate Limericks 😂"
-
         if st.button(button_label):
+            try:
                 StreamLLMChatResponse(button_description)
+            except:
+                st.write('Dr. Llama is asleep. Starting up now on A10 - please give 5 minutes then retry as KEDA scales up from zero to activate running container(s).')
 
 # Function to Stream Inference Client for Inference Endpoint Responses
 def StreamLLMChatResponse(prompt):
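
Note: the hunk's context stops at the definition line of StreamLLMChatResponse, so its body is not part of this commit's diff. For orientation only, below is a minimal sketch of what such a streaming helper might look like, built on huggingface_hub.InferenceClient and Streamlit. The environment variable names (HF_ENDPOINT_URL, HF_KEY) and the token limit are assumptions for illustration, not the repository's actual implementation.

# Minimal sketch (assumption, not the repo's actual code): stream tokens
# from a Hugging Face Inference Endpoint into a Streamlit placeholder.
import os
import streamlit as st
from huggingface_hub import InferenceClient

# Hypothetical configuration names; the real app may read these differently.
API_URL = os.getenv("HF_ENDPOINT_URL", "")
API_KEY = os.getenv("HF_KEY", "")

def StreamLLMChatResponse(prompt):
    # An endpoint scaled to zero answers with an error (e.g. 503) until KEDA
    # starts a container; that surfaces here as an exception, which is what
    # the new try/except in add_witty_humor_buttons() absorbs.
    client = InferenceClient(model=API_URL, token=API_KEY)
    placeholder = st.empty()
    collected = ""
    for token in client.text_generation(prompt, max_new_tokens=512, stream=True):
        collected += token
        placeholder.markdown(collected)
    return collected

With a helper shaped like this, any request made before the endpoint finishes scaling up raises, and the caller falls back to the "Dr. Llama is asleep" message added in this commit instead of crashing the Streamlit script.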