HeRksTAn committed on
Commit
9e4e42d
1 Parent(s): ad96042

now able to set your own api key

Browse files
Files changed (1) hide show
  1. app.py +22 -16
app.py CHANGED
@@ -12,11 +12,6 @@ from langchain.schema.runnable.config import RunnableConfig
12
  from langchain_core.output_parsers import StrOutputParser
13
  from langchain.text_splitter import RecursiveCharacterTextSplitter
14
  from langchain_community.document_loaders import UnstructuredPDFLoader
15
- from langchain_community.document_loaders import OnlinePDFLoader
16
- from langchain_community.document_loaders import PyPDFLoader
17
-
18
-
19
-
20
 
21
 
22
  load_dotenv()
@@ -87,26 +82,37 @@ loader = UnstructuredPDFLoader("br_femogfirs.pdf", strategy="fast")
87
  data = loader.load_and_split(text_splitter)
88
  # data = loader.load()
89
 
90
- embedding_model = OpenAIEmbeddings(model="text-embedding-3-small")
91
 
92
- vector_store = Pinecone.from_documents(data, embedding_model, index_name="bygnings-regl-rag-1")
93
- retriever = vector_store.as_retriever()
94
 
95
  rag_prompt = ChatPromptTemplate.from_template(RAG_PROMPT)
96
 
97
- model = ChatOpenAI(model="gpt-3.5-turbo")
98
 
99
  @cl.on_chat_start
100
- async def main():
101
- mecanic_qa_chain = ""
102
- mecanic_qa_chain = (
103
- {"context": itemgetter("question") | retriever, "question": itemgetter("question")}
104
- | RunnablePassthrough.assign(context=itemgetter("context"))
105
- | rag_prompt | model | StrOutputParser()
106
- )
 
 
 
 
 
 
 
 
 
 
 
 
 
107
 
108
  cl.user_session.set("runnable", mecanic_qa_chain)
109
 
 
 
110
  @cl.on_message
111
  async def on_message(message: cl.Message):
112
  runnable = cl.user_session.get("runnable")
 
12
  from langchain_core.output_parsers import StrOutputParser
13
  from langchain.text_splitter import RecursiveCharacterTextSplitter
14
  from langchain_community.document_loaders import UnstructuredPDFLoader
 
 
 
 
 
15
 
16
 
17
  load_dotenv()
 
82
  data = loader.load_and_split(text_splitter)
83
  # data = loader.load()
84
 
 
85
 
 
 
86
 
87
  rag_prompt = ChatPromptTemplate.from_template(RAG_PROMPT)
88
 
 
89
 
90
  @cl.on_chat_start
91
+ async def main():
92
+
93
+ user_env = await cl.AskUserMessage(content="Please enter your OpenAI API Key:").send()
94
+
95
+ if user_env:
96
+
97
+ await cl.Message(content=f"{user_env['output']}",).send()
98
+
99
+ os.environ["OPENAI_API_KEY"] = user_env['output']
100
+
101
+ model = ChatOpenAI(model="gpt-3.5-turbo")
102
+ embedding_model = OpenAIEmbeddings(model="text-embedding-3-small")
103
+ vector_store = Pinecone.from_documents(data, embedding_model, index_name="bygnings-regl-rag-1")
104
+ retriever = vector_store.as_retriever()
105
+
106
+
107
+ mecanic_qa_chain = (
108
+ {"context": itemgetter("question") | retriever, "question": itemgetter("question")}
109
+ | RunnablePassthrough.assign(context=itemgetter("context"))
110
+ | rag_prompt | model | StrOutputParser())
111
 
112
  cl.user_session.set("runnable", mecanic_qa_chain)
113
 
114
+
115
+
116
  @cl.on_message
117
  async def on_message(message: cl.Message):
118
  runnable = cl.user_session.get("runnable")