acecalisto3 committed on
Commit
c6d665e
1 Parent(s): 3b9ecbf

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +12 -7
app.py CHANGED
@@ -13,7 +13,6 @@ from llama_cpp.llama_cpp_agent import get_messages_formatter_type, get_context_b
13
  from io import StringIO
14
  import tempfile
15
 
16
-
17
  # --- Global Variables ---
18
  CURRENT_PROJECT = {} # Store project data (code, packages, etc.)
19
  MODEL_OPTIONS = {
@@ -49,8 +48,14 @@ if 'current_state' not in st.session_state:
49
  classifier = pipeline("text-classification", model="facebook/bart-large-mnli")
50
 
51
  # --- Load the model and tokenizer ---
52
- model = AutoModelForCausalLM.from_pretrained("mistralai/Mixtral-8x7B-Instruct-v0.1", use_auth_token=os.environ.get("huggingface_token"))
53
- tokenizer = AutoTokenizer.from_pretrained("mistralai/Mixtral-8x7B-Instruct-v0.1", use_auth_token=os.environ.get("huggingface_token"))
 
 
 
 
 
 
54
 
55
  # --- Utility Functions ---
56
  def install_and_import(package_name):
@@ -82,7 +87,7 @@ def create_interface_from_input(input_str):
82
  functions = [getattr(module, name) for name in dir(module) if callable(getattr(module, name))]
83
 
84
  function_list = [(func.__name__, func) for func in functions if not func.__name__.startswith("_")]
85
- return function_list, f"'"Interface for `{package_name}` created."'"
86
 
87
  except Exception as e:
88
  return [], str(e)
@@ -95,9 +100,9 @@ def execute_pip_command(command, add_message):
95
  if output == '' and process.poll() is not None:
96
  break
97
  if output:
98
- add_message("System", f"'"\n{output.strip()}\n"'"
99
-
100
- time.sleep(0.1) # Simulate delay for more realistic streaming
101
  rc = process.poll()
102
  return rc
103
 
 
13
  from io import StringIO
14
  import tempfile
15
 
 
16
  # --- Global Variables ---
17
  CURRENT_PROJECT = {} # Store project data (code, packages, etc.)
18
  MODEL_OPTIONS = {
 
48
  classifier = pipeline("text-classification", model="facebook/bart-large-mnli")
49
 
50
  # --- Load the model and tokenizer ---
51
+ model = AutoModelForCausalLM.from_pretrained(
52
+ "mistralai/Mixtral-8x7B-Instruct-v0.1",
53
+ use_auth_token=os.environ.get("huggingface_token")
54
+ )
55
+ tokenizer = AutoTokenizer.from_pretrained(
56
+ "mistralai/Mixtral-8x7B-Instruct-v0.1",
57
+ use_auth_token=os.environ.get("huggingface_token")
58
+ )
59
 
60
  # --- Utility Functions ---
61
  def install_and_import(package_name):
 
87
  functions = [getattr(module, name) for name in dir(module) if callable(getattr(module, name))]
88
 
89
  function_list = [(func.__name__, func) for func in functions if not func.__name__.startswith("_")]
90
+ return function_list, f"Interface for `{package_name}` created."
91
 
92
  except Exception as e:
93
  return [], str(e)
 
100
  if output == '' and process.poll() is not None:
101
  break
102
  if output:
103
+ # Corrected line: Combine the f-string parts
104
+ add_message("System", f"\n{output.strip()}\n")
105
+ time.sleep(0.1) # Simulate delay for more realistic streaming
106
  rc = process.poll()
107
  return rc
108