Chandranshu Jain committed on
Commit
5a7e8e6
1 Parent(s): 068748f

Update app.py

Files changed (1)
  1. app.py +5 -8
app.py CHANGED
@@ -13,9 +13,10 @@ from langchain.prompts import PromptTemplate
 from langchain_community.document_loaders import PyPDFLoader
 from langchain_chroma import Chroma
 from langchain_community.vectorstores import Chroma
-from transformers import AutoTokenizer, AutoModelForCausalLM
-import transformers
-import torch
+# Use a pipeline as a high-level helper
+from transformers import pipeline
+
+
 
 #st.set_page_config(page_title="Document Genie", layout="wide")
 
@@ -89,11 +90,7 @@ def get_conversational_chain():
     model_id = "google/gemma-1.1-2b-it"
     dtype = torch.bfloat16
 
-    tokenizer = AutoTokenizer.from_pretrained(model_id)
-    llm= AutoModelForCausalLM.from_pretrained(
-        model_id,
-        torch_dtype=dtype,
-    )
+    llm = pipeline("text-generation", model="google/gemma-1.1-2b-it")
 
     pt = ChatPromptTemplate.from_template(template)
     # Retrieve and generate using the relevant snippets of the blog.
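
For context, a minimal sketch of how the pipeline-backed `llm` introduced by this commit could be composed with the `pt` prompt built a few lines later. This sketch is not part of app.py: the `HuggingFacePipeline` wrapper, the placeholder `template`, and the chain layout are assumptions. A bare `transformers` pipeline is not a LangChain Runnable, so it needs a wrapper like this (or a custom callable) before it can be piped after `pt`; the sketch also does not use `dtype`, which in app.py still references the `torch` import this commit removes.

# Sketch only (assumptions noted above), not code from the commit.
from transformers import pipeline
from langchain_huggingface import HuggingFacePipeline  # assumed wrapper; also available as langchain_community.llms.HuggingFacePipeline
from langchain_core.prompts import ChatPromptTemplate
from langchain_core.output_parsers import StrOutputParser

# Same pipeline call as in the commit.
generator = pipeline("text-generation", model="google/gemma-1.1-2b-it")

# Wrap the pipeline so it becomes a LangChain Runnable and can be used in an LCEL chain.
llm = HuggingFacePipeline(pipeline=generator)

# `template` is defined elsewhere in app.py; this placeholder stands in for it.
template = "Answer from the context:\n{context}\n\nQuestion: {question}\nAnswer:"
pt = ChatPromptTemplate.from_template(template)

# Prompt -> wrapped Gemma pipeline -> plain string output.
chain = pt | llm | StrOutputParser()
print(chain.invoke({"context": "Gemma is a family of open models.", "question": "What is Gemma?"}))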