zetavg committed
Commit 0c97f91
1 Parent(s): 4b2400e

fix possible error

Files changed (1):
  llama_lora/ui/inference_ui.py  +2 -2
llama_lora/ui/inference_ui.py CHANGED
@@ -40,7 +40,7 @@ def do_inference(
     prompter = Prompter(prompt_template)
     prompt = prompter.generate_prompt(variables)
 
-    if "/" not in lora_model_name and lora_model_name != "None":
+    if lora_model_name is not None and "/" not in lora_model_name and lora_model_name != "None":
         path_of_available_lora_model = get_path_of_available_lora_model(
             lora_model_name)
         if path_of_available_lora_model:
@@ -54,7 +54,7 @@ def do_inference(
             return
 
     model = get_base_model()
-    if not lora_model_name == "None":
+    if not lora_model_name == "None" and lora_model_name is not None:
         model = get_model_with_lora(lora_model_name)
     tokenizer = get_tokenizer()
 
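Below is a minimal standalone sketch (not part of the repository; the helper name should_resolve_local_lora is hypothetical) illustrating the error this commit most likely guards against: in Python, evaluating "/" not in lora_model_name raises a TypeError when lora_model_name is None, so the None check has to come first and short-circuit the rest of the condition.

def should_resolve_local_lora(lora_model_name):
    # Check for None first, as in the committed change; without it,
    # `"/" not in None` raises TypeError: argument of type 'NoneType' is not iterable.
    return (
        lora_model_name is not None
        and "/" not in lora_model_name
        and lora_model_name != "None"
    )

print(should_resolve_local_lora(None))        # False, and no TypeError
print(should_resolve_local_lora("None"))      # False: the UI's "no LoRA" sentinel string
print(should_resolve_local_lora("my-lora"))   # True: plain name, resolve a local path
print(should_resolve_local_lora("org/lora"))  # False: contains "/", looks like a Hub ID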