HeRksTAn committed on
Commit
a09c231
1 Parent(s): 0bcde19

changed instructions

Browse files
Files changed (1) hide show
  1. app.py +5 -5
app.py CHANGED
@@ -28,7 +28,7 @@ You house builder and can only provide your answers from the context.
28
  You can only provide a response in danish
29
 
30
  +++
31
- Please provide a sample from the context in your response
32
  +++
33
 
34
  Don't tell in your response that you are getting it from the context.
@@ -66,25 +66,25 @@ rag_prompt = ChatPromptTemplate.from_template(RAG_PROMPT)
66
@cl.on_chat_start
async def main():
    """Chainlit session start (pre-commit version): ask the user for an
    OpenAI API key, then assemble the RAG chain and stash it in the
    user session under the "runnable" key.
    """
    # Prompt (in Danish) for the user's API key before anything else runs.
    user_env = await cl.AskUserMessage(content="Indsæt venligst din api-nøgle før vi kan gå videre:").send()

    if user_env:
        # NOTE(review): the key is stored process-wide via os.environ,
        # not per Chainlit session — confirm that is intended.
        os.environ["OPENAI_API_KEY"] = user_env['output']
        await cl.Message(content=f"Din api nøgle er nu tilføjet - nu kan du lave en forespørgsel!").send()

    # NOTE(review): nesting reconstructed from a diff rendering — the
    # statements below are assumed to run regardless of user_env; verify
    # against the original app.py.
    model = ChatOpenAI(model="gpt-3.5-turbo")
    embedding_model = OpenAIEmbeddings(model="text-embedding-3-small")
    vector_store = Pinecone.from_documents(data, embedding_model, index_name="bygnings-regl-rag-1")
    retriever = vector_store.as_retriever()

    # LCEL pipeline: retrieve context for the question, keep the question,
    # then prompt -> chat model -> plain-text output.
    mecanic_qa_chain = (
        {"context": itemgetter("question") | retriever, "question": itemgetter("question")}
        | RunnablePassthrough.assign(context=itemgetter("context"))
        | rag_prompt | model | StrOutputParser())

    cl.user_session.set("runnable", mecanic_qa_chain)
 
89
 
90
 
 
28
  You can only provide a response in danish
29
 
30
  +++
31
+ Please provide sample text from the context next to your response.
32
  +++
33
 
34
  Don't tell in your response that you are getting it from the context.
 
66
@cl.on_chat_start
async def main():
    """Chainlit session start: collect the user's OpenAI API key, build the
    building-regulations RAG chain, and store it for subsequent messages.

    Side effects:
        - Sets the process-wide ``OPENAI_API_KEY`` environment variable.
        - Stores the assembled chain in the user session under "runnable".
    """
    # Ask (in Danish) for the API key; feedback widgets are disabled on
    # this prompt message.
    user_env = await cl.AskUserMessage(
        content="Indsæt venligst din api-nøgle før vi kan gå videre:",
        disable_feedback=True,
    ).send()

    if user_env:
        # NOTE(review): os.environ is process-wide, so despite the
        # "for sessionen" wording the key is shared across concurrent
        # sessions — confirm this is acceptable.
        os.environ["OPENAI_API_KEY"] = user_env['output']
        # Plain string literal: the original used an f-string with no
        # placeholders (and formerly a stray trailing comma in the call).
        await cl.Message(content="Din api nøgle er nu tilføjet for sessionen - nu kan du lave en forespørgsel!").send()

    # NOTE(review): if no key was supplied, the OpenAI clients below will
    # fail at request time — verify this is the desired failure mode.
    model = ChatOpenAI(model="gpt-3.5-turbo")
    embedding_model = OpenAIEmbeddings(model="text-embedding-3-small")
    vector_store = Pinecone.from_documents(data, embedding_model, index_name="bygnings-regl-rag-1")
    retriever = vector_store.as_retriever()

    # LCEL pipeline: retrieve context for the question, keep the question,
    # then prompt -> chat model -> plain-text output.
    building_qa_chain = (
        {"context": itemgetter("question") | retriever, "question": itemgetter("question")}
        | RunnablePassthrough.assign(context=itemgetter("context"))
        | rag_prompt | model | StrOutputParser())

    cl.user_session.set("runnable", building_qa_chain)
 
89
 
90