Spaces:
Sleeping
Sleeping
adityakumar
committed on
Commit
•
328d783
1
Parent(s):
9bcac6c
Update app.py
Browse files
app.py
CHANGED
@@ -16,9 +16,10 @@ from langchain.chains import ConversationalRetrievalChain, LLMChain
|
|
16 |
#from langchain.chains import LLMChain
|
17 |
from langchain_core.prompts import PromptTemplate
|
18 |
|
19 |
-
# below
|
20 |
from transformers import LlamaForCausalLM
|
21 |
from sentence_transformers import SentenceTransformer
|
|
|
22 |
|
23 |
# adding separator
|
24 |
def add_vertical_space(spaces=1):
|
@@ -87,7 +88,8 @@ def main():
|
|
87 |
docsearch.save_local(DB_FAISS_PATH)
|
88 |
|
89 |
# loading remote llama model
|
90 |
-
llm = LlamaForCausalLM.from_pretrained("meta-llama/Llama-2-7b-hf")
|
|
|
91 |
|
92 |
# custom prompt
|
93 |
custom_template="""
|
|
|
16 |
#from langchain.chains import LLMChain
|
17 |
from langchain_core.prompts import PromptTemplate
|
18 |
|
19 |
+
# below 3 libraries are for loading remote models
|
20 |
from transformers import LlamaForCausalLM
|
21 |
from sentence_transformers import SentenceTransformer
|
22 |
+
from ctransformers import AutoModelForCausalLM
|
23 |
|
24 |
# adding separator
|
25 |
def add_vertical_space(spaces=1):
|
|
|
88 |
docsearch.save_local(DB_FAISS_PATH)
|
89 |
|
90 |
# loading remote llama model
|
91 |
+
#llm = LlamaForCausalLM.from_pretrained("meta-llama/Llama-2-7b-hf")
|
92 |
+
llm = AutoModelForCausalLM.from_pretrained("google/gemma-1.1-2b-it")
|
93 |
|
94 |
# custom prompt
|
95 |
custom_template="""
|