sidcww committed
Commit fd99074 · verified · 1 Parent(s): 5abcbc8

Update app.py

Files changed (1)
  1. app.py +6 -9
app.py CHANGED
@@ -3,10 +3,10 @@ from langchain_google_genai import ChatGoogleGenerativeAI
 from langchain_google_genai import GoogleGenerativeAIEmbeddings
 from langchain.prompts import PromptTemplate
 from langchain_community.vectorstores import Chroma
-from langchain_text_splitters import CharacterTextSplitter
+from langchain.text_splitter import CharacterTextSplitter
 from langchain.chains.combine_documents import create_stuff_documents_chain
 from langchain.chains import create_retrieval_chain
-from langchain.vectorstores import Chroma
+from langchain_community.document_loaders import PyPDFLoader
 
 # Set your API key
 GOOGLE_API_KEY = "AIzaSyCHLS-wFvSYxSTJjkRQQ-FiC5064112Eq8"
@@ -29,17 +29,14 @@ pages = loader.load_and_split(text_splitter)
 # Turn the chunks into embeddings and store them in Chroma
 vectordb = Chroma.from_documents(pages, embeddings)
 
-# Configure Chroma as a retriever with top_k=5
+# Configure Chroma as a retriever with top_k=10
 retriever = vectordb.as_retriever(search_kwargs={"k": 10})
 
 # Create the retrieval chain
-template = """
-You are a helpful AI assistant.
-Answer based on the context provided.
+template = """You are a helpful AI assistant. Answer based on the context provided.
 context: {context}
 input: {input}
-answer:
-"""
+answer:"""
 prompt = PromptTemplate.from_template(template)
 combine_docs_chain = create_stuff_documents_chain(llm, prompt)
 retrieval_chain = create_retrieval_chain(retriever, combine_docs_chain)
@@ -48,4 +45,4 @@ retrieval_chain = create_retrieval_chain(retriever, combine_docs_chain)
 response = retrieval_chain.invoke({"input": "How do I apply for personal leave?"})
 
 # Print the answer to the question
-print(response["answer"])
+print(response["answer"])
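
For reference, a minimal sketch of what the full app.py plausibly looks like after this commit. Only the lines visible in the diff are confirmed; the LLM, embeddings, loader, and text-splitter setup, the model names ("gemini-pro", "models/embedding-001"), the PDF file name, and the chunking parameters are assumptions, and the hard-coded API key is replaced with a placeholder.

# Sketch of the updated app.py; lines marked "assumed" are not shown in the
# commit diff and are reconstructed as plausible defaults.
from langchain_google_genai import ChatGoogleGenerativeAI
from langchain_google_genai import GoogleGenerativeAIEmbeddings
from langchain.prompts import PromptTemplate
from langchain_community.vectorstores import Chroma
from langchain.text_splitter import CharacterTextSplitter
from langchain.chains.combine_documents import create_stuff_documents_chain
from langchain.chains import create_retrieval_chain
from langchain_community.document_loaders import PyPDFLoader

# Set your API key (placeholder -- avoid committing real keys)
GOOGLE_API_KEY = "your-google-api-key"

# Assumed: Gemini chat model for answering and embedding model for indexing
llm = ChatGoogleGenerativeAI(model="gemini-pro", google_api_key=GOOGLE_API_KEY)
embeddings = GoogleGenerativeAIEmbeddings(
    model="models/embedding-001", google_api_key=GOOGLE_API_KEY
)

# Assumed: load the source PDF and split it into chunks
loader = PyPDFLoader("handbook.pdf")  # hypothetical file name
text_splitter = CharacterTextSplitter(chunk_size=1000, chunk_overlap=100)  # assumed sizes
pages = loader.load_and_split(text_splitter)

# Turn the chunks into embeddings and store them in Chroma
vectordb = Chroma.from_documents(pages, embeddings)

# Configure Chroma as a retriever with top_k=10
retriever = vectordb.as_retriever(search_kwargs={"k": 10})

# Create the retrieval chain
template = """You are a helpful AI assistant. Answer based on the context provided.
context: {context}
input: {input}
answer:"""
prompt = PromptTemplate.from_template(template)
combine_docs_chain = create_stuff_documents_chain(llm, prompt)
retrieval_chain = create_retrieval_chain(retriever, combine_docs_chain)

# Ask a question against the indexed document
response = retrieval_chain.invoke({"input": "How do I apply for personal leave?"})

# Print the answer to the question
print(response["answer"])

The stuff-documents chain formats all ten retrieved chunks into the {context} slot of the prompt, so the chunk size and the retriever's k together determine how much text is sent to the model per query.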