mahiatlinux committed on
Commit 9a170de
1 Parent(s): e1f6a6a

Update app.py

Files changed (1)
  1. app.py +2 -2
app.py CHANGED
@@ -24,7 +24,7 @@ if not torch.cuda.is_available():
 
 # If a GPU is available, load the model and tokenizer with specific configurations.
 if torch.cuda.is_available():
-    model_id = "mahiatlinux/MasherAI-7B-v6.1-another_test3"
+    model_id = "mahiatlinux/MasherAI-v6.1-7B-checkpoint3-code4"
     model = AutoModelForCausalLM.from_pretrained(model_id, device_map="auto", load_in_4bit=True)
     tokenizer = AutoTokenizer.from_pretrained(model_id)
     tokenizer.use_default_system_prompt = False
@@ -44,7 +44,7 @@ def generate(
     # Preparing conversation history for processing.
     conversation = []
     # Adding system prompt.
-    #conversation.append({"from": "human", "value": system_prompt})
+    # conversation.append({"from": "human", "value": system_prompt})
     # Extending the conversation history with user and assistant interactions.
     for user, assistant in chat_history:
         conversation.extend([{"from": "human", "value": user}, {"from": "gpt", "value": assistant}])
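For reference, here is a minimal sketch of how the two changed spots fit together: loading the updated checkpoint with 4-bit quantization and building the ShareGPT-style conversation list from the chat history. This is not the full app.py; the build_conversation helper and the appended current message are assumptions added here for illustration, while the model id, loading arguments, and history loop come from the diff above.

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

# Checkpoint id from the updated line in this commit.
model_id = "mahiatlinux/MasherAI-v6.1-7B-checkpoint3-code4"

if torch.cuda.is_available():
    # 4-bit loading as written in the diff; requires the bitsandbytes package.
    model = AutoModelForCausalLM.from_pretrained(model_id, device_map="auto", load_in_4bit=True)
    tokenizer = AutoTokenizer.from_pretrained(model_id)
    tokenizer.use_default_system_prompt = False


def build_conversation(message, chat_history):
    # Mirrors the history-building loop from the second hunk.
    conversation = []
    for user, assistant in chat_history:
        conversation.extend([
            {"from": "human", "value": user},
            {"from": "gpt", "value": assistant},
        ])
    # Assumption: the current user message is appended after the history
    # (this step is outside the lines shown in the hunk).
    conversation.append({"from": "human", "value": message})
    return conversation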