ndn1954 committed
Commit
a67dec5
1 Parent(s): 36b4800

Update app.py

Files changed (1)
  1. app.py +10 -6
app.py CHANGED
@@ -15,14 +15,18 @@ from langchain.prompts import PromptTemplate
  from langchain.vectorstores import Qdrant
  from PyPDF2 import PdfReader
  import streamlit as st
- # Use a pipeline as a high-level helper
- from transformers import pipeline
-
- pipe = pipeline("text-generation", model="TheBloke/Llama-2-7B-Chat-GGML")
 
+ # Use a pipeline as a high-level helper
+ #from transformers import pipeline
+ #pipe = pipeline("text-generation", model="TheBloke/Llama-2-7B-Chat-GGML")
  # Load model directly
- from transformers import AutoModel
- model = AutoModel.from_pretrained("TheBloke/Llama-2-7B-Chat-GGML")
+ #from transformers import AutoModel
+ #model = AutoModel.from_pretrained("TheBloke/Llama-2-7B-Chat-GGML")
+
+ from transformers import AutoModelForCausalLM, AutoTokenizer
+ model_id = "TheBloke/Llama-2-7B-Chat-GGML"
+ tokenizer = AutoTokenizer.from_pretrained(model_id)
+ model = AutoModelForCausalLM.from_pretrained(model_id, trust_remote_code=True)
 
  PROMPT_TEMPLATE = """
  Use the following pieces of context enclosed by triple backquotes to answer the question at the end.
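
For reference, a minimal usage sketch of the `tokenizer` and `model` objects this commit creates, using the standard transformers generation API. It assumes the referenced checkpoint exposes transformers-compatible weights and tokenizer files; the prompt string and generation settings below are illustrative and are not part of app.py.

from transformers import AutoModelForCausalLM, AutoTokenizer

# Assumption: this repo provides weights/tokenizer loadable by transformers.
model_id = "TheBloke/Llama-2-7B-Chat-GGML"
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(model_id, trust_remote_code=True)

# Illustrative prompt; in app.py the prompt is built from PROMPT_TEMPLATE.
prompt = "Summarize the uploaded document in one sentence."
inputs = tokenizer(prompt, return_tensors="pt")            # tokenize to tensors
output_ids = model.generate(**inputs, max_new_tokens=64)   # generate a short continuation
print(tokenizer.decode(output_ids[0], skip_special_tokens=True))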