Tuchuanhuhuhu committed on
Commit 104cf1c
1 Parent(s): b48d5f8

Support reading models from a local folder; load alpaca as a llama model

Files changed (2)
  1. modules/base_model.py +1 -1
  2. modules/presets.py +8 -1
modules/base_model.py CHANGED
@@ -37,7 +37,7 @@ class ModelType(Enum):
             model_type = ModelType.OpenAI
         elif "chatglm" in model_name_lower:
             model_type = ModelType.ChatGLM
-        elif "llama" in model_name_lower:
+        elif "llama" in model_name_lower or "alpaca" in model_name_lower:
             model_type = ModelType.LLaMA
         else:
             model_type = ModelType.Unknown
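
For illustration only, not part of this commit: a minimal sketch of the name-based dispatch the hunk above modifies. The enum members, the "gpt" condition for the OpenAI branch, and the surrounding get_type helper are simplified assumptions, not the actual contents of modules/base_model.py.

from enum import Enum

class ModelType(Enum):
    # Simplified member set; the real enum in modules/base_model.py may differ.
    Unknown = -1
    OpenAI = 0
    ChatGLM = 1
    LLaMA = 2

    @classmethod
    def get_type(cls, model_name: str) -> "ModelType":
        model_name_lower = model_name.lower()
        if "gpt" in model_name_lower:  # assumed condition for the OpenAI branch
            return cls.OpenAI
        elif "chatglm" in model_name_lower:
            return cls.ChatGLM
        # alpaca checkpoints are fine-tuned LLaMA weights, so they reuse the LLaMA loader
        elif "llama" in model_name_lower or "alpaca" in model_name_lower:
            return cls.LLaMA
        else:
            return cls.Unknown

print(ModelType.get_type("alpaca-7b"))   # ModelType.LLaMA after this commit
print(ModelType.get_type("chatglm-6b"))  # ModelType.ChatGLM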
modules/presets.py CHANGED
@@ -1,7 +1,9 @@
 # -*- coding:utf-8 -*-
-import gradio as gr
+import os
 from pathlib import Path
 
+import gradio as gr
+
 # ChatGPT 设置
 INITIAL_SYSTEM_PROMPT = "You are a helpful assistant."
 API_HOST = "api.openai.com"
@@ -78,6 +80,11 @@ MODELS = [
     "llama-65b-hf",
 ] # 可选的模型
 
+for dir_name in os.listdir("models"):
+    if os.path.isdir(os.path.join("models", dir_name)):
+        if dir_name not in MODELS:
+            MODELS.append(dir_name)
+
 DEFAULT_MODEL = 0 # 默认的模型在MODELS中的序号,从0开始数
 
 MODEL_TOKEN_LIMIT = {
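
For illustration only, not part of this commit: the presets.py hunk scans a models/ directory relative to the working directory and appends every subfolder name to MODELS. The standalone sketch below reproduces that scan with an extra isdir guard (an addition, not in the diff) so it also runs when models/ does not exist yet.

import os

MODELS = ["gpt-3.5-turbo", "chatglm-6b", "llama-65b-hf"]  # trimmed copy of the preset list

# Same scan as the hunk above; the outer isdir check is an extra guard
# (not in the original diff) so a missing models/ directory does not
# raise FileNotFoundError at import time.
if os.path.isdir("models"):
    for dir_name in os.listdir("models"):
        if os.path.isdir(os.path.join("models", dir_name)):
            if dir_name not in MODELS:
                MODELS.append(dir_name)

print(MODELS)  # local subfolders of ./models now appear as selectable model names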