Boardpac/theekshanas committed on
Commit
23257c7
1 Parent(s): 6a78182

keep only working llm models

Browse files
__pycache__/config.cpython-311.pyc CHANGED
Binary files a/__pycache__/config.cpython-311.pyc and b/__pycache__/config.cpython-311.pyc differ
 
config.py CHANGED
@@ -1,9 +1,11 @@
1
  MODELS={
2
  "DEFAULT":"tiiuae/falcon-7b-instruct",
3
- "gpt4all":"gpt4all",
4
  "flan-t5-xxl":"google/flan-t5-xxl",
5
  "falcon-7b-instruct":"tiiuae/falcon-7b-instruct",
6
  "openai gpt-3.5":"openai",
 
 
7
 
8
  }
9
 
 
1
  MODELS={
2
  "DEFAULT":"tiiuae/falcon-7b-instruct",
3
+ # "gpt4all":"gpt4all",
4
  "flan-t5-xxl":"google/flan-t5-xxl",
5
  "falcon-7b-instruct":"tiiuae/falcon-7b-instruct",
6
  "openai gpt-3.5":"openai",
7
+ # "Deci/DeciLM-6b-instruct":"Deci/DeciLM-6b-instruct",
8
+ # "Deci/DeciLM-6b":"Deci/DeciLM-6b",
9
 
10
  }
11
 
qaPipeline.py CHANGED
@@ -148,6 +148,10 @@ class QAPipeline:
148
  self.llm = HuggingFaceHub(repo_id=model_type, model_kwargs={"temperature":0.001, "max_length":1024})
149
  case "openai":
150
  self.llm = ChatOpenAI(model_name="gpt-3.5-turbo", temperature=0)
 
 
 
 
151
  case _default:
152
  # raise exception if model_type is not supported
153
  raise Exception(f"Model type {model_type} is not supported. Please choose a valid one")
 
148
  self.llm = HuggingFaceHub(repo_id=model_type, model_kwargs={"temperature":0.001, "max_length":1024})
149
  case "openai":
150
  self.llm = ChatOpenAI(model_name="gpt-3.5-turbo", temperature=0)
151
+ case "Deci/DeciLM-6b-instruct":
152
+ self.llm = ChatOpenAI(model_name="Deci/DeciLM-6b-instruct", temperature=0)
153
+ case "Deci/DeciLM-6b":
154
+ self.llm = ChatOpenAI(model_name="Deci/DeciLM-6b", temperature=0)
155
  case _default:
156
  # raise exception if model_type is not supported
157
  raise Exception(f"Model type {model_type} is not supported. Please choose a valid one")