ffreemt committed on
Commit
fef5540
1 Parent(s): 92f88db

Update remove references to other models

Browse files
Files changed (1) hide show
  1. app.py +4 -4
app.py CHANGED
@@ -203,7 +203,7 @@ class GenerationConfig:
203
 
204
 
205
  def format_prompt(system_prompt: str, user_prompt: str):
206
- """Format prompt based on: https://huggingface.co/spaces/mosaicml/mpt-30b-chat/blob/main/app.py."""
207
  # TODO im_start/im_end possible fix for WizardCoder
208
 
209
  system_prompt = f"<|im_start|>system\n{system_prompt}<|im_end|>\n"
@@ -250,9 +250,9 @@ MODEL_FILENAME = "wizardlm-13b-v1.0-uncensored.ggmlv3.q4_1.bin" # 8.4G
250
 
251
  DESTINATION_FOLDER = "models"
252
 
253
- REPO_ID = "TheBloke/mpt-30B-chat-GGML"
254
  if "WizardCoder" in MODEL_FILENAME:
255
- REPO_ID = "TheBloke/WizardCoder-15B-1.0-GGML"
256
 
257
  if "uncensored" in MODEL_FILENAME.lower():
258
  REPO_ID = "TheBloke/WizardLM-13B-V1.0-Uncensored-GGML"
@@ -262,7 +262,7 @@ download_quant(DESTINATION_FOLDER, REPO_ID, MODEL_FILENAME)
262
  logger.info("done dl")
263
 
264
  # if "mpt" in model_filename:
265
- # config = AutoConfig.from_pretrained("mosaicml/mpt-30b-cha t", context_length=8192)
266
  # llm = AutoModelForCausalLM.from_pretrained(
267
  # os.path.abspath(f"models/{model_filename}"),
268
  # model_type="mpt",
 
203
 
204
 
205
  def format_prompt(system_prompt: str, user_prompt: str):
206
+ """Format prompt based on: https://huggingface.co/spaces/m osaicml/mpt-30b-chat/blob/main/app.py."""
207
  # TODO im_start/im_end possible fix for WizardCoder
208
 
209
  system_prompt = f"<|im_start|>system\n{system_prompt}<|im_end|>\n"
 
250
 
251
  DESTINATION_FOLDER = "models"
252
 
253
+ REPO_ID = "The Bloke/mpt-30B-chat-GGML"
254
  if "WizardCoder" in MODEL_FILENAME:
255
+ REPO_ID = "The Bloke/WizardCoder-15B-1.0-GGML"
256
 
257
  if "uncensored" in MODEL_FILENAME.lower():
258
  REPO_ID = "TheBloke/WizardLM-13B-V1.0-Uncensored-GGML"
 
262
  logger.info("done dl")
263
 
264
  # if "mpt" in model_filename:
265
+ # config = AutoConfig.from_pretrained("m osaicml/mpt-30b-cha t", context_length=8192)
266
  # llm = AutoModelForCausalLM.from_pretrained(
267
  # os.path.abspath(f"models/{model_filename}"),
268
  # model_type="mpt",