geekyrakshit committed on
Commit
6d0856c
1 Parent(s): b20052a

update: app.py to start chat on button press

Browse files
Files changed (1) hide show
  1. app.py +12 -5
app.py CHANGED
@@ -7,19 +7,26 @@ from guardrails_genie.llm import OpenAIModel
7
  load_dotenv()
8
  weave.init(project_name="guardrails-genie")
9
 
10
-
11
  openai_model = st.sidebar.selectbox("OpenAI LLM", ["", "gpt-4o-mini", "gpt-4o"])
12
  chat_condition = openai_model != ""
13
 
 
 
 
 
 
 
 
14
 
15
- if chat_condition:
 
16
  st.title("Guardrails Genie")
17
 
18
  # Initialize chat history
19
  if "messages" not in st.session_state:
20
  st.session_state.messages = []
21
 
22
- llm_model = OpenAIModel(model_name="gpt-4o-mini")
23
 
24
  # Display chat messages from history on app rerun
25
  for message in st.session_state.messages:
@@ -37,9 +44,9 @@ if chat_condition:
37
  llm_model, user_prompts=prompt, messages=st.session_state.messages
38
  )
39
  response = response.choices[0].message.content
40
- response += f"\n\n---\n[Explore in Weave]({call.ui_url})"
41
  # Display assistant response in chat message container
42
  with st.chat_message("assistant"):
43
- st.markdown(response)
44
  # Add assistant response to chat history
45
  st.session_state.messages.append({"role": "assistant", "content": response})
 
7
  load_dotenv()
8
  weave.init(project_name="guardrails-genie")
9
 
 
10
  openai_model = st.sidebar.selectbox("OpenAI LLM", ["", "gpt-4o-mini", "gpt-4o"])
11
  chat_condition = openai_model != ""
12
 
13
+ # Use session state to track if the chat has started
14
+ if "chat_started" not in st.session_state:
15
+ st.session_state.chat_started = False
16
+
17
+ # Start chat when button is pressed
18
+ if st.sidebar.button("Start Chat") and chat_condition:
19
+ st.session_state.chat_started = True
20
 
21
+ # Display chat UI if chat has started
22
+ if st.session_state.chat_started:
23
  st.title("Guardrails Genie")
24
 
25
  # Initialize chat history
26
  if "messages" not in st.session_state:
27
  st.session_state.messages = []
28
 
29
+ llm_model = OpenAIModel(model_name=openai_model)
30
 
31
  # Display chat messages from history on app rerun
32
  for message in st.session_state.messages:
 
44
  llm_model, user_prompts=prompt, messages=st.session_state.messages
45
  )
46
  response = response.choices[0].message.content
47
+
48
  # Display assistant response in chat message container
49
  with st.chat_message("assistant"):
50
+ st.markdown(response + f"\n\n---\n[Explore in Weave]({call.ui_url})")
51
  # Add assistant response to chat history
52
  st.session_state.messages.append({"role": "assistant", "content": response})