nxphi47 committed
Commit fafc188
1 parent: c821309

Update app.py

Files changed (1):
  1. app.py +20 -10
app.py CHANGED
@@ -523,6 +523,10 @@ async def _stream_fn(
     except StopIteration:
         update = history + [[message, None]]
         yield update, update
+    except Exception as e:
+        yield history, history
+        raise e
+
     try:
         async for response in generator:
            update = history + [[message, response]]
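
The added except Exception branch yields the unchanged history back to the UI and then re-raises, so a failed stream no longer leaves the chat box stuck on a half-finished turn. A minimal sketch of the pattern, assuming _stream_fn wraps an async token generator; fake_stream below is a stand-in for the real vLLM stream, and the handler is shown around the streaming loop rather than the exact block it guards in app.py:

import asyncio

async def fake_stream(fail: bool):
    # Stand-in for the real vLLM async stream.
    yield "Hello"
    if fail:
        raise RuntimeError("backend went away")
    yield "Hello, world"

async def _stream_fn(message, history, fail=False):
    generator = fake_stream(fail)
    try:
        async for response in generator:
            update = history + [[message, response]]
            yield update, update
    except Exception as e:
        # Hand the unchanged history back to the UI, then re-raise
        # so the failure still shows up in the logs.
        yield history, history
        raise e

async def main():
    try:
        async for chat, _state in _stream_fn("hi", [], fail=True):
            print(chat)
    except RuntimeError as e:
        print("stream failed:", e)

asyncio.run(main())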
@@ -683,10 +687,14 @@ def vllm_generate_stream(
 
 
 # ! avoid saying
-LANG_BLOCK_MESSAGE = """Sorry, the language you have asked is currently not supported. If you have questions in other supported languages, I'll be glad to help. \
-Please also consider clearing the chat box for a better experience."""
+# LANG_BLOCK_MESSAGE = """Sorry, the language you have asked is currently not supported. If you have questions in other supported languages, I'll be glad to help. \
+# Please also consider clearing the chat box for a better experience."""
+
+# KEYWORD_BLOCK_MESSAGE = "Sorry, I cannot fulfill your request. If you have any unrelated question, I'll be glad to help."
+
+LANG_BLOCK_MESSAGE = """Unsupported language."""
 
-KEYWORD_BLOCK_MESSAGE = "Sorry, I cannot fulfill your request. If you have any unrelated question, I'll be glad to help."
+KEYWORD_BLOCK_MESSAGE = "Invalid request."
 
 
 def _detect_lang(text):
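
The long refusal strings are kept as comments and swapped for terse one-liners. The diff later references a safety_check helper that consumes these constants; its body is not shown in this commit, so the sketch below is only a guess at its shape, with an assumed language whitelist and keyword list:

SUPPORTED_LANGS = {"en", "vi", "id", "th"}        # assumption: supported-language set
BLOCKED_KEYWORDS = ["example blocked phrase"]     # assumption: keyword blacklist

LANG_BLOCK_MESSAGE = """Unsupported language."""
KEYWORD_BLOCK_MESSAGE = "Invalid request."

def _detect_lang_stub(text: str) -> str:
    # Stand-in for the app's _detect_lang; always reports English here.
    return "en"

def safety_check(message: str, history=None):
    """Return a block message when the input should be refused, else None."""
    if _detect_lang_stub(message) not in SUPPORTED_LANGS:
        return LANG_BLOCK_MESSAGE
    if any(k in message.lower() for k in BLOCKED_KEYWORDS):
        return KEYWORD_BLOCK_MESSAGE
    return None

print(safety_check("hello"))  # -> None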
@@ -696,7 +704,6 @@ def _detect_lang(text):
     try:
         dlang = detect_lang(text)
     except Exception as e:
-        # print(f'Error: {e}')
         if "No features in text." in str(e):
             return "en"
         else:
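
This hunk only drops a commented-out debug print. For context, the "No features in text." string matched here is the message the langdetect package raises for empty or symbol-only input; assuming detect_lang wraps that package, the full helper looks roughly like this (the "err" fallback is an assumption, since the else branch is outside the hunk):

from langdetect import detect as detect_lang   # pip install langdetect

def _detect_lang(text):
    try:
        dlang = detect_lang(text)
    except Exception as e:
        if "No features in text." in str(e):
            # Empty or punctuation-only input: default to English.
            return "en"
        else:
            return "err"   # assumed fallback; the real branch is not shown in this hunk
    return dlang

print(_detect_lang("xin chào bạn"))  # likely 'vi'
print(_detect_lang("1234 !!"))       # 'en' via the fallback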
@@ -767,7 +774,12 @@ def chatml_format(message, history=None, system_prompt=None):
     return chatml_chat_convo_format(conversations, True, default_system=system_prompt)
 
 
-def debug_chat_response_stream_multiturn(*args, **kwargs):
+def debug_chat_response_stream_multiturn(message, history):
+    message_safety = safety_check(message, history=history)
+    if message_safety is not None:
+        # yield message_safety
+        raise gr.Error(message_safety)
+
     message = "This is a debugging message"
     for i in range(len(message)):
         time.sleep(0.05)
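
The debug responder now takes the real (message, history) pair and runs the same safety_check gate as the production path; raising gr.Error inside a Gradio callback aborts the event and surfaces the message as an error popup in the UI instead of a chat reply. A runnable sketch, with my_safety_check standing in for the app's safety_check:

import time
import gradio as gr

def my_safety_check(message, history=None):
    # Stand-in gate: block anything containing the word "blocked".
    return "Invalid request." if "blocked" in message.lower() else None

def debug_chat_response_stream_multiturn(message, history):
    message_safety = my_safety_check(message, history=history)
    if message_safety is not None:
        # Shown to the user as an error popup rather than streamed text.
        raise gr.Error(message_safety)
    reply = "This is a debugging message"
    for i in range(len(reply)):
        time.sleep(0.05)
        yield reply[: i + 1]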
@@ -787,13 +799,10 @@ def chat_response_stream_multiturn(
 ) -> str:
     global LOG_FILE, LOG_PATH
     if DEBUG:
-        yield from debug_chat_response_stream_multiturn()
+        yield from debug_chat_response_stream_multiturn(message, history)
         return
     from vllm import LLM, SamplingParams
     """Build multi turn
-    <bos>[INST] B_SYS SytemPrompt E_SYS Prompt [/INST] Answer <eos>
-    <bos>[INST] Prompt [/INST] Answer <eos>
-    <bos>[INST] Prompt [/INST]
 
     message is incoming prompt
     history don't have the current messauge
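
The DEBUG branch now forwards the current message and history into the debug generator, and the hunk also drops the leftover Llama-style prompt template from the docstring. Since chat_response_stream_multiturn is itself a generator, the branch has to yield from the debug stream and then return to skip the vLLM path; a minimal sketch of that delegation pattern:

DEBUG = True

def debug_stream(message, history):
    # Stand-in debug generator: echo the message one character at a time.
    for i in range(1, len(message) + 1):
        yield message[:i]

def chat_response_stream(message, history):
    if DEBUG:
        # Forward every chunk from the debug generator, then stop early.
        yield from debug_stream(message, history)
        return
    yield "real model output would stream here"

for chunk in chat_response_stream("hi!", []):
    print(chunk)   # h / hi / hi!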
@@ -1424,7 +1433,8 @@ def launch_demo():
 
     if DEBUG:
         model_desc += "\n<br>!!!!! This is in debug mode, responses will copy original"
-        response_fn = debug_chat_response_echo
+        # response_fn = debug_chat_response_echo
+        response_fn = chat_response_stream_multiturn
         print(f'Creating in DEBUG MODE')
     if SAVE_LOGS:
         LOG_FILE = open(LOG_PATH, 'a', encoding='utf-8')
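
With this change, DEBUG mode keeps the real streaming handler instead of swapping in the echo stub, so debug runs exercise the same safety_check and streaming path. How response_fn is consumed is not part of this hunk; the wiring below is only a hypothetical sketch using gr.ChatInterface:

import gradio as gr

DEBUG = True

def chat_response_stream_multiturn(message, history):
    yield "streamed reply"

def debug_chat_response_echo(message, history):
    yield message

response_fn = chat_response_stream_multiturn
if DEBUG:
    # Before this commit: response_fn = debug_chat_response_echo
    response_fn = chat_response_stream_multiturn

demo = gr.ChatInterface(response_fn)
# demo.launch()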
 