oceansweep committed
Commit dc959c4
Parent: 53626ba

Replace with slightly older app.py

App_Function_Libraries/__pycache__/Audio_Files.cpython-310.pyc ADDED
Binary file (13.9 kB).
 
App_Function_Libraries/__pycache__/Local_File_Processing_Lib.cpython-310.pyc ADDED
Binary file (2.21 kB).
 
App_Function_Libraries/__pycache__/Local_LLM_Inference_Engine_Lib.cpython-310.pyc ADDED
Binary file (13.1 kB).
 
App_Function_Libraries/__pycache__/PDF_Ingestion_Lib.cpython-310.pyc ADDED
Binary file (3.88 kB).
 
App_Function_Libraries/__pycache__/SQLite_DB.cpython-310.pyc ADDED
Binary file (28.1 kB).
 
App_Function_Libraries/__pycache__/System_Checks_Lib.cpython-310.pyc ADDED
Binary file (5.18 kB).
 
app.py CHANGED
@@ -746,7 +746,8 @@ Sample commands:
                         help="Use a local LLM from the script(Downloads llamafile from github and 'mistral-7b-instruct-v0.2.Q8' - 8GB model from Huggingface)")
     parser.add_argument('--server_mode', action='store_true',
                         help='Run in server mode (This exposes the GUI/Server to the network)')
-    parser.add_argument('-share', '--share_public', action='store_true', help="This will use Gradio's built-in ngrok tunneling to share the server publicly on the internet."),
+    parser.add_argument('--share_public', type=int, default=7860,
+                        help="This will use Gradio's built-in ngrok tunneling to share the server publicly on the internet. Specify the port to use (default: 7860)")
     parser.add_argument('--port', type=int, default=7860, help='Port to run the server on')
     parser.add_argument('--ingest_text_file', action='store_true',
                         help='Ingest .txt files as content instead of treating them as URL lists')
@@ -768,11 +769,12 @@ Sample commands:
     set_chunk_txt_by_tokens = False
     set_max_txt_chunk_tokens = 0
 
-    if args.share_public or args.share:
+    if args.share_public:
         share_public = args.share_public
     else:
         share_public = None
     if args.server_mode:
+
         server_mode = args.server_mode
     else:
         server_mode = None
@@ -840,12 +842,13 @@ Sample commands:
         sys.exit(0)
 
     # Launch the GUI
+    # This is huggingface so:
     if args.user_interface:
         if local_llm:
             local_llm_function()
             time.sleep(2)
             webbrowser.open_new_tab('http://127.0.0.1:7860')
-        launch_ui(share_public=False)
+        launch_ui(demo_mode=False)
     elif not args.input_path:
         parser.print_help()
         sys.exit(1)
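For reference, the sketch below reconstructs the post-commit --share_public handling as a standalone snippet. Only the flags and the branch visible in the diff are reproduced; the bare ArgumentParser and the sample argv are illustrative assumptions, not code from app.py.

import argparse

# Minimal sketch of the argument handling after this commit; only the
# flags shown in the diff are reproduced here.
parser = argparse.ArgumentParser()
parser.add_argument('--share_public', type=int, default=7860,
                    help="This will use Gradio's built-in ngrok tunneling to share the "
                         "server publicly on the internet. Specify the port to use "
                         "(default: 7860)")
parser.add_argument('--port', type=int, default=7860, help='Port to run the server on')

args = parser.parse_args(['--share_public', '8080'])  # sample argv for illustration

# Post-commit branch from the diff. Because --share_public now defaults to
# 7860, args.share_public is always truthy, so the else branch never runs
# and share_public carries a port number rather than a boolean.
if args.share_public:
    share_public = args.share_public
else:
    share_public = None

print(share_public)  # -> 8080

Under the pre-commit definition (action='store_true'), the same invocation would be rejected by argparse, since -share/--share_public accepted no value.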