torVik committed · Commit 2fd430b · verified · 1 Parent(s): 098aed9

Update app.py

Files changed (1)
  1. app.py +1 -1
app.py CHANGED
@@ -32,7 +32,7 @@ MAX_INPUT_TOKEN_LENGTH = int(os.getenv("MAX_INPUT_TOKEN_LENGTH", "4096"))
 
 # Debugging: GPU check passed, loading model
 if torch.cuda.is_available():
-    model_id = "torVik/bggpt-Instruct-bglawinsv1UNS"
+    model_id = "BGLAW/bggpt-Instruct-bglawinsv1UNS_merged"
     try:
         print("Loading model...")
         model = AutoModelForCausalLM.from_pretrained(model_id, torch_dtype=torch.float16, device_map="auto", token=HF_TOKEN)
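
For context, a minimal sketch of how the updated model_id is consumed by the surrounding loading block in app.py. The imports, the HF_TOKEN environment handling, the tokenizer load, and the except clause are not visible in this hunk and are shown here only as assumptions.

# Minimal sketch of the loading path after this commit; everything outside the
# visible hunk (imports, token handling, tokenizer, except body) is assumed.
import os

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

HF_TOKEN = os.getenv("HF_TOKEN")  # assumed: access token read from the environment
MAX_INPUT_TOKEN_LENGTH = int(os.getenv("MAX_INPUT_TOKEN_LENGTH", "4096"))

# Debugging: GPU check passed, loading model
if torch.cuda.is_available():
    model_id = "BGLAW/bggpt-Instruct-bglawinsv1UNS_merged"  # repo id introduced by this commit
    try:
        print("Loading model...")
        model = AutoModelForCausalLM.from_pretrained(
            model_id,
            torch_dtype=torch.float16,
            device_map="auto",
            token=HF_TOKEN,
        )
        tokenizer = AutoTokenizer.from_pretrained(model_id, token=HF_TOKEN)  # assumed companion call
    except Exception as exc:  # the real except body lies outside the visible hunk
        print(f"Failed to load {model_id}: {exc}")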