awacke1 committed
Commit e0bdaf1 • 1 Parent(s): 19193a1

Update app.py

Files changed (1)
  1. app.py +5 -52
app.py CHANGED
@@ -315,52 +315,6 @@ key = os.getenv('OPENAI_API_KEY')
 prompt = f"Write instructions to teach discharge planning along with guidelines and patient education. List entities, features and relationships to CCDA and FHIR objects in boldface."
 should_save = st.sidebar.checkbox("💾 Save", value=True, help="Save your session data.")
 
-# 2. Prompt label button demo for LLM
-def add_witty_humor_buttons():
-    with st.expander("Wit and Humor 🤣", expanded=True):
-        # Tip about the Dromedary family
-        st.markdown("🔬 **Fun Fact**: Dromedaries, part of the camel family, have a single hump and are adapted to arid environments. Their 'superpowers' include the ability to survive without water for up to 7 days, thanks to their specialized blood cells and water storage in their hump.")
-
-        # Define button descriptions
-        descriptions = {
-            "Generate Limericks 😂": "Write ten random adult limericks based on quotes that are tweet length and make you laugh 🎭",
-            "Wise Quotes 🧙": "Generate ten wise quotes that are tweet length 🦉",
-            "Funny Rhymes 🎤": "Create ten funny rhymes that are tweet length 🎶",
-            "Medical Jokes 💉": "Create ten medical jokes that are tweet length 🏥",
-            "Minnesota Humor ❄️": "Create ten jokes about Minnesota that are tweet length 🌨️",
-            "Top Funny Stories 📖": "Create ten funny stories that are tweet length 📚",
-            "More Funny Rhymes 🎙️": "Create ten more funny rhymes that are tweet length 🎵"
-        }
-
-        # Create columns
-        col1, col2, col3 = st.columns([1, 1, 1], gap="small")
-
-        # Add buttons to columns
-        if col1.button("Wise Limericks 😂"):
-            StreamLLMChatResponse(descriptions["Generate Limericks 😂"])
-
-        if col2.button("Wise Quotes 🧙"):
-            StreamLLMChatResponse(descriptions["Wise Quotes 🧙"])
-
-        #if col3.button("Funny Rhymes 🎤"):
-        #    StreamLLMChatResponse(descriptions["Funny Rhymes 🎤"])
-
-        col4, col5, col6 = st.columns([1, 1, 1], gap="small")
-
-        if col4.button("Top Ten Funniest Clean Jokes 💉"):
-            StreamLLMChatResponse(descriptions["Top Ten Funniest Clean Jokes 💉"])
-
-        if col5.button("Minnesota Humor ❄️"):
-            StreamLLMChatResponse(descriptions["Minnesota Humor ❄️"])
-
-        if col6.button("Origins of Medical Science True Stories"):
-            StreamLLMChatResponse(descriptions["Origins of Medical Science True Stories"])
-
-        col7 = st.columns(1, gap="small")
-
-        if col7[0].button("Top Ten Best Write a streamlit python program prompts to build AI programs. 🎙️"):
-            StreamLLMChatResponse(descriptions["Top Ten Best Write a streamlit python program prompts to build AI programs. 🎙️"])
-
 def SpeechSynthesis(result):
     documentHTML5='''
     <!DOCTYPE html>
@@ -391,8 +345,6 @@ def SpeechSynthesis(result):
     components.html(documentHTML5, width=1280, height=300)
     #return result
 
-
-# 3. Stream Llama Response
 # @st.cache_resource
 def StreamLLMChatResponse(prompt):
     try:
@@ -433,7 +385,7 @@ def StreamLLMChatResponse(prompt):
         SpeechSynthesis(result)
         return result
     except:
-        st.write('Llama model is asleep. Starting up now on A10 - please give 5 minutes then retry as KEDA scales up from zero to activate running container(s).')
+        st.write('model is asleep. Starting up now on A10 - please give 5 minutes then retry as KEDA scales up from zero to activate running container(s).')
 
 # 4. Run query with payload
 def query(payload):
@@ -448,8 +400,7 @@ def generate_filename(prompt, file_type):
     central = pytz.timezone('US/Central')
     safe_date_time = datetime.now(central).strftime("%m%d_%H%M")
     replaced_prompt = prompt.replace(" ", "_").replace("\n", "_")
-    safe_prompt = "".join(x for x in replaced_prompt if x.isalnum() or x == "_")[:255] # 255 is linux max, 260 is windows max
-    #safe_prompt = "".join(x for x in replaced_prompt if x.isalnum() or x == "_")[:45]
+    safe_prompt = "".join(x for x in replaced_prompt if x.isalnum() or x == "_")[:255] # 255 is linux max, 260 is windows max filename size.
     return f"{safe_date_time}_{safe_prompt}.{file_type}"
 
 # 6. Speech transcription via OpenAI service
@@ -501,7 +452,9 @@ def create_file(filename, prompt, response, should_save=True):
     except:
         st.write('.')
 
-    #has_python_code = re.search(r"```python([\s\S]*?)```", prompt.strip() + '\r\n' + response)
+    has_python_code = re.search(r"```python([\s\S]*?)```", prompt.strip() + '\r\n' + response)
+    if has_python_code:
+        st.markdown('# 🐍 Python Code - Lets make an app.py.')
     #has_python_code = bool(re.search(r"```python([\s\S]*?)```", prompt.strip() + '\r\n' + response))
     #if has_python_code:
     #    python_code = re.findall(r"```python([\s\S]*?)```", response)[0].strip()
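Note: the new has_python_code check above only prints a heading; the extraction step is still commented out (the re.findall line). A minimal sketch of that step, separate from the commit, using a hypothetical helper name and a made-up response string:

    import re

    def extract_python_code(text):
        # Return the body of the first ```python ... ``` fence, or None if absent.
        match = re.search(r"```python([\s\S]*?)```", text)
        return match.group(1).strip() if match else None

    response = "Here you go:\n```python\nimport streamlit as st\nst.write('hello')\n```"
    code = extract_python_code(response)
    if code is not None:
        print(code)  # create_file could write this text out as app.py, as the new heading suggests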
 
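Note: the documentHTML5 body is truncated in this diff, so the exact markup is not shown. A sketch of a helper with the same shape, assuming it reads the result aloud with the browser's Web Speech API; the width and height match the components.html call above, while the markup and the helper name are assumptions:

    import streamlit.components.v1 as components

    def SpeechSynthesisSketch(result):
        # Hypothetical stand-in for the truncated documentHTML5 page: embed the text
        # and have the browser's speech synthesis read it aloud.
        # result is inserted unescaped, which is acceptable only for a sketch.
        document_html = f'''
        <!DOCTYPE html>
        <html>
        <body>
          <p id="payload">{result}</p>
          <script>
            const text = document.getElementById("payload").innerText;
            window.speechSynthesis.speak(new SpeechSynthesisUtterance(text));
          </script>
        </body>
        </html>
        '''
        components.html(document_html, width=1280, height=300)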