DexterSptizu committed on
Commit
3771898
·
verified ·
1 Parent(s): e947d3c

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +5 -5
app.py CHANGED
@@ -3,7 +3,7 @@ from langchain.vectorstores import Chroma
3
  from langchain.embeddings.openai import OpenAIEmbeddings
4
  from langchain.schema import Document
5
  from langchain.chat_models import ChatOpenAI
6
- from langchain.prompts.chat import ChatPromptTemplate
7
  from PyPDF2 import PdfReader
8
  import os
9
 
@@ -54,15 +54,15 @@ def rag_from_pdf(question, pdf_file, api_key):
54
  # Initialize the LLM
55
  llm = ChatOpenAI(model="gpt-3.5-turbo")
56
 
57
- # Create the chat prompt
58
  messages = [
59
- {"role": "system", "content": "You are a helpful assistant answering questions based on the provided PDF document."},
60
- {"role": "user", "content": f"Question: {question}\n\nContext: {context}"}
61
  ]
62
 
63
  # Generate response
64
  response = llm(messages=messages)
65
- return response["choices"][0]["message"]["content"].strip()
66
  except Exception as e:
67
  return f"An error occurred: {str(e)}"
68
 
 
3
  from langchain.embeddings.openai import OpenAIEmbeddings
4
  from langchain.schema import Document
5
  from langchain.chat_models import ChatOpenAI
6
+ from langchain.schema import SystemMessage, HumanMessage
7
  from PyPDF2 import PdfReader
8
  import os
9
 
 
54
  # Initialize the LLM
55
  llm = ChatOpenAI(model="gpt-3.5-turbo")
56
 
57
+ # Create the chat messages
58
  messages = [
59
+ SystemMessage(content="You are a helpful assistant answering questions based on the provided PDF document."),
60
+ HumanMessage(content=f"Question: {question}\n\nContext: {context}")
61
  ]
62
 
63
  # Generate response
64
  response = llm(messages=messages)
65
+ return response.content.strip()
66
  except Exception as e:
67
  return f"An error occurred: {str(e)}"
68