Severian committed
Commit ca6876c · verified · 1 Parent(s): ebfa2e3

Update chatbot.py

Files changed (1)
  1. chatbot.py +206 -363
chatbot.py CHANGED
@@ -1,387 +1,230 @@
- from typing import List, Tuple, Dict, Any, Optional
  import logging
  import re
- from data_models import IdeaForm, IDEA_STAGES
- from config import DEFAULT_SYSTEM_PROMPT, STAGES
- from utils import (
-     get_llm_response, extract_form_data, save_idea_to_database,
-     load_idea_from_database, update_idea_in_database,
-     get_db, clear_database, init_db, create_tables,
-     perform_web_search, optimize_search_query,
-     SessionLocal, InnovativeIdea
- )

- class InnovativeIdeaChatbot:
-     def __init__(self):
-         create_tables()
-         init_db()
-         self.idea_form = IdeaForm()
-         self.chat_history = []
-         self.idea_id = None
-         self.current_stage = None
-         self.api_key = None
-         self.add_system_message(self.get_initial_greeting())
-
-     def get_initial_greeting(self) -> str:
-         greeting = """
- Welcome to the Innovative Idea Generator! I'm Myamoto, your AI assistant designed to help you refine and develop your innovative ideas.
- Here's how we'll work together:
- 1. We'll go through 10 stages to explore different aspects of your idea.
- 2. At each stage, I'll ask you questions and provide feedback to help you think deeper about your concept.
- 3. You can ask me questions at any time or request more information on a topic.
- 4. If you want to perform a web search for additional information, just start your message with '@' followed by your search query.
- 5. When you're ready to move to the next stage, simply type 'next'.
- Let's start by exploring your innovative idea! What's the name of your idea, or would you like help coming up with one?
- """
-         self.greeted = True
-         return greeting
-
-     def add_system_message(self, message: str):
-         self.chat_history.append(("System", message))
-
-     def set_api_key(self, api_key: str):
-         self.api_key = api_key
-
-     def activate_stage(self, stage_name: str) -> Optional[str]:
-         self.current_stage = stage_name
-         for stage in STAGES:
-             if stage["name"] == stage_name:
-                 return f"Let's work on the '{stage_name}' stage. {stage['question']}"
-         return None
-
-     def process_stage_input(self, stage_name: str, message: str, model: str, system_prompt: str, thinking_budget: int) -> Tuple[List[Tuple[str, str]], Dict[str, Any]]:
-         if self.current_stage != stage_name:
-             activation_message = self.activate_stage(stage_name)
-             if activation_message is None:
-                 error_message = f"Error: Unable to activate stage '{stage_name}'. Please check if the stage name is correct."
-                 self.chat_history.append(("System", error_message))
-                 return self.chat_history, self.idea_form.dict()
-             self.chat_history.append(("System", activation_message))
-
-         # Check for web search request
-         if message.startswith('@'):
-             search_query = message[1:].strip()
-             optimized_query = optimize_search_query(search_query, model)
-             search_results = perform_web_search(optimized_query)
-             self.chat_history.append(("Human", message))
-             self.chat_history.append(("AI", f"Here are the search results for '{optimized_query}':\n\n{search_results}"))
-             return self.chat_history, self.idea_form.dict()
-
-         # Generate the prompt for the current stage
-         stage_prompt = self.generate_prompt_for_stage(stage_name)
-
-         # Use the DEFAULT_SYSTEM_PROMPT from config.py
-         formatted_system_prompt = DEFAULT_SYSTEM_PROMPT.format(
-             current_stage=stage_name,
-             stage_prompt=stage_prompt
-         )
-
-         # Combine the formatted system prompt and user's input
-         combined_prompt = f"{formatted_system_prompt}\n\nUser input: {message}"
-
-         # Get LLM response
-         llm_response = get_llm_response(combined_prompt, model, thinking_budget, self.api_key)

-         # Parse the LLM response to extract only the user-facing content
-         parsed_response = self.parse_llm_response(llm_response)
-
-         # Add the interaction to chat history
-         self.chat_history.append(("Human", message))
-         self.chat_history.append(("AI", parsed_response))
-
-         # Extract form data from the LLM response
-         form_data = extract_form_data(llm_response)
-
-         # Update the idea form
-         if stage_name in form_data:
-             setattr(self.idea_form, stage_name.lower().replace(" ", "_"), form_data[stage_name])

-         return self.chat_history, self.idea_form.dict()
-
-     def parse_llm_response(self, response: str) -> str:
-         # Remove content within <form_data> tags
-         response = re.sub(r'<form_data>.*?</form_data>', '', response, flags=re.DOTALL)
-
-         # Remove content within <reflection> tags
-         response = re.sub(r'<reflection>.*?</reflection>', '', response, flags=re.DOTALL)
-
-         # Remove content within <analysis> tags
-         response = re.sub(r'<analysis>.*?</analysis>', '', response, flags=re.DOTALL)
-
-         # Remove content within <summary> tags
-         response = re.sub(r'<summary>.*?</summary>', '', response, flags=re.DOTALL)
-
-         # Remove content within <step> tags
-         response = re.sub(r'<step>.*?</step>', '', response, flags=re.DOTALL)
-
-         # Remove any remaining HTML-like tags
-         response = re.sub(r'<[^>]+>', '', response)
-
-         # Remove extra whitespace and newlines
-         response = re.sub(r'\s+', ' ', response).strip()
-
-         return response
-
-     def fill_out_form(self, current_stage: str, model: str, thinking_budget: int) -> Dict[str, str]:
-         form_data = {}
-         for stage in STAGES:
-             stage_name = stage["name"]
-             if stage_name == current_stage:
-                 # Generate new data for the current stage
-                 form_data[stage["field"]] = self.generate_form_data(stage_name, model, thinking_budget)
-             else:
-                 # Use existing data for other stages
-                 form_data[stage["field"]] = getattr(self.idea_form, stage["field"], "")
-
-         # Update the idea form
-         for stage in STAGES:
-             setattr(self.idea_form, stage["field"], form_data[stage["field"]])
-
-         # Save to database
          try:
-             new_session = SessionLocal()
-             if self.idea_id:
-                 update_idea_in_database(self.idea_id, self.idea_form, new_session)
-             else:
-                 self.idea_id = save_idea_to_database(self.idea_form, new_session)
-             new_session.commit()
-         except Exception as e:
-             logging.error(f"Error saving idea to database: {str(e)}")
-             new_session.rollback()
-         finally:
-             new_session.close()
-
-         return form_data
-
-     def generate_prompt_for_stage(self, stage: str) -> str:
-         for s in IDEA_STAGES:
-             if s.name == stage:
-                 return f"We are currently working on the '{stage}' stage. {s.question}"
-         return f"We are currently working on the '{stage}' stage. Please provide relevant information."

-     def reset(self):
-         self.chat_history = []
-         self.idea_form = IdeaForm()
-         self.idea_id = None
-         self.current_stage = None
-         self.add_system_message(self.get_initial_greeting())
-         try:
-             new_session = SessionLocal()
-             clear_database(new_session)
-             new_session.commit()
-         except Exception as e:
-             logging.error(f"Error clearing database: {str(e)}")
-             new_session.rollback()
-         finally:
-             new_session.close()
-         return self.chat_history, self.idea_form.dict()

-     def start_over(self):
-         self.chat_history = []
-         self.idea_form = IdeaForm()
-         self.current_stage = None
-         self.add_system_message(self.get_initial_greeting())
-
-         try:
-             new_session = SessionLocal()
-             # Clear the existing database
-             clear_database(new_session)
-
-             # Create a new empty idea
-             new_idea = InnovativeIdea()
-             new_session.add(new_idea)
-             new_session.commit()
-             new_session.refresh(new_idea)
-
-             # Update the idea_id
-             self.idea_id = new_idea.id
-
-             new_session.close()
-         except Exception as e:
-             logging.error(f"Error in start_over: {str(e)}")
-             if 'new_session' in locals():
-                 new_session.rollback()
-                 new_session.close()
-
-         return self.chat_history, self.idea_form.dict(), STAGES[0]["name"]

-     def update_idea_form(self, stage_name: str, form_data: str):
-         setattr(self.idea_form, stage_name.lower().replace(" ", "_"), form_data)
-         try:
-             new_session = SessionLocal()
-             if self.idea_id:
-                 update_idea_in_database(self.idea_id, self.idea_form, new_session)
-             else:
-                 self.idea_id = save_idea_to_database(self.idea_form, new_session)
-             new_session.commit()
          except Exception as e:
-             logging.error(f"Error updating idea form: {str(e)}")
-             new_session.rollback()
-         finally:
-             new_session.close()
-
-     def generate_form_data(self, stage: str, model: str, thinking_budget: int) -> str:
-         # Prepare the conversation history for the LLM
-         conversation = "\n".join([f"{role}: {message}" for role, message in self.chat_history])
-
-         stage_prompt = self.generate_prompt_for_stage(stage)
-
-         formatted_system_prompt = DEFAULT_SYSTEM_PROMPT.format(
-             current_stage=stage,
-             stage_prompt=stage_prompt
          )

-         prompt = f"""
- {formatted_system_prompt}
-
- Based on the following conversation, extract the relevant information for the '{stage}' stage of the innovative idea:
-
- {conversation}
-
- Please provide a concise summary for the '{stage}' stage, focusing only on the information relevant to this stage.
- Your response should be structured as follows:
-
- 1. A brief analysis of the conversation related to this stage.
- 2. A concise summary of the key points relevant to this stage.
- 3. A suggested form entry for this stage, enclosed in <form_data></form_data> tags.
- The form entry should be in the format: "{stage}: Content"
-
- Remember to keep the form entry concise and directly related to the '{stage}' stage. Do not include information from other stages in the form entry.
- """
-
-         # Get LLM response
-         llm_response = get_llm_response(prompt, model, thinking_budget, self.api_key)
-
-         # Extract form data from the LLM response
-         form_data = extract_form_data(llm_response)
-
-         return form_data.get(stage, "")
-
-     def process_stage_input_stream(self, stage_name: str, message: str, model: str, system_prompt: str, thinking_budget: int):
-         if self.current_stage != stage_name:
-             activation_message = self.activate_stage(stage_name)
-             if activation_message is None:
-                 error_message = f"Error: Unable to activate stage '{stage_name}'. Please check if the stage name is correct."
-                 self.chat_history.append(("System", error_message))
-                 yield self.chat_history, self.idea_form.dict()
-                 return
-
-             self.chat_history.append(("System", activation_message))
-
-         # Check for web search request
-         if message.startswith('@'):
-             search_query = message[1:].strip()
-             optimized_query = optimize_search_query(search_query, model)
-             search_results = perform_web_search(optimized_query)
-             self.chat_history.append(("Human", message))
-             self.chat_history.append(("AI", f"Here are the search results for '{optimized_query}':\n\n{search_results}"))
-             yield self.chat_history, self.idea_form.dict()
-             return
-
-         # Generate the prompt for the current stage
-         stage_prompt = self.generate_prompt_for_stage(stage_name)
-
-         formatted_system_prompt = DEFAULT_SYSTEM_PROMPT.format(
-             current_stage=stage_name,
-             stage_prompt=stage_prompt
          )

-         combined_prompt = f"{formatted_system_prompt}\n\nUser input: {message}"

-         # Get LLM response
-         llm_response = get_llm_response(combined_prompt, model, thinking_budget, self.api_key)
-         parsed_response = self.parse_llm_response(llm_response)
-
-         self.chat_history.append(("Human", message))
-         self.chat_history.append(("AI", parsed_response))
-
-         form_data = extract_form_data(llm_response)
-
-         if stage_name in form_data:
-             setattr(self.idea_form, stage_name.lower().replace(" ", "_"), form_data[stage_name])
-
-         yield self.chat_history, self.idea_form.dict()
-
-     def fill_out_form_stream(self, current_stage: str, model: str, thinking_budget: int):
-         form_data = {}
-         for stage in IDEA_STAGES:
-             stage_name = stage.name
-             if stage_name == current_stage:
-                 form_data[stage_name] = self.generate_form_data(stage_name, model, thinking_budget)
              else:
-                 form_data[stage_name] = getattr(self.idea_form, stage.field, "")
-             yield form_data

-         # Update the idea form
-         for stage in IDEA_STAGES:
-             setattr(self.idea_form, stage.field, form_data[stage.name])
-
-         # Save to database
-         try:
-             new_session = SessionLocal()
-             if self.idea_id:
-                 update_idea_in_database(self.idea_id, self.idea_form, new_session)
-             else:
-                 self.idea_id = save_idea_to_database(self.idea_form, new_session)
-             new_session.commit()
-         except Exception as e:
-             logging.error(f"Error saving idea to database: {str(e)}")
-             new_session.rollback()
-         finally:
-             new_session.close()
-
-     def generate_form_data_stream(self, stage: str, model: str, thinking_budget: int):
-         conversation = "\n".join([f"{role}: {message}" for role, message in self.chat_history])
-
-         stage_prompt = self.generate_prompt_for_stage(stage)
-
-         formatted_system_prompt = DEFAULT_SYSTEM_PROMPT.format(
-             current_stage=stage,
-             stage_prompt=stage_prompt
          )

-         prompt = f"""
- {formatted_system_prompt}
-
- Based on the following conversation, extract the relevant information for the '{stage}' stage of the innovative idea:
-
- {conversation}
-
- Please provide a concise summary for the '{stage}' stage, focusing only on the information relevant to this stage.
- Your response should be structured as follows:
-
- 1. A brief analysis of the conversation related to this stage.
- 2. A concise summary of the key points relevant to this stage.
- 3. A suggested form entry for this stage, enclosed in <form_data></form_data> tags.
- The form entry should be in the format: "{stage}: Content"

- Remember to keep the form entry concise and directly related to the '{stage}' stage. Do not include information from other stages in the form entry.
- """

-         llm_response = get_llm_response(prompt, model, thinking_budget, self.api_key)
-         form_data = extract_form_data(llm_response)
-         return form_data.get(stage, "")

-     def update_form_field(self, stage_name: str, value: str):
-         setattr(self.idea_form, stage_name.lower().replace(" ", "_"), value)
-         try:
-             # Create a new session for this operation
-             new_session = SessionLocal()
-             if self.idea_id:
-                 update_idea_in_database(self.idea_id, self.idea_form, new_session)
-             else:
-                 self.idea_id = save_idea_to_database(self.idea_form, new_session)
-             new_session.commit()
-         except Exception as e:
-             logging.error(f"Error updating form field: {str(e)}")
-             # If an error occurs, rollback the new session
-             new_session.rollback()
-         finally:
-             # Always close the new session
-             new_session.close()
-         return value

- # Ensure this is at the end of the file
  if __name__ == "__main__":
-     # Any test code or standalone functionality for chatbot.py
-     pass

+ import gradio as gr
+ from sqlalchemy.exc import SQLAlchemyError
+ from utils import InnovativeIdea, init_db, get_db, SessionLocal, get_llm_response
+ from data_models import IdeaForm
+ from chatbot import InnovativeIdeaChatbot
+ from config import MODELS, DEFAULT_SYSTEM_PROMPT, STAGES
  import logging
+ import os
+ from typing import Dict, Any
  import re
+ import time
+
+ # Set up logging
+ logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
+
+ def create_gradio_interface():
+     innovative_chatbot = InnovativeIdeaChatbot()
+     default_stage = STAGES[0]["name"]
+
+     # Initialize the database
+     try:
+         init_db()
+         db = next(get_db())
+         initial_idea = db.query(InnovativeIdea).first()
+         if initial_idea is None:
+             logging.info("No initial idea found in the database. Creating a new one.")
+             initial_idea = InnovativeIdea()
+             db.add(initial_idea)
+             db.commit()
+             db.refresh(initial_idea)
+
+         # Create form_fields while the session is still open
+         form_fields = {
+             stage["name"]: gr.Textbox(
+                 label=stage["question"],
+                 placeholder=stage["example"],
+                 value=getattr(initial_idea, stage["field"], ""),
+                 visible=(stage["name"] == default_stage),
+                 interactive=False
+             ) for stage in STAGES
+         }
+
+         # Now we can safely close the session
+         db.close()
+     except SQLAlchemyError as e:
+         logging.error(f"Database initialization failed: {str(e)}")
+         raise RuntimeError(f"Failed to initialize database: {str(e)}")
+
+     def chatbot_function(message, history, model, system_prompt, thinking_budget, current_stage):
          try:
+             # If this is the first message, get the initial greeting
+             if not history:
+                 initial_greeting = innovative_chatbot.get_initial_greeting()
+                 history.append((None, initial_greeting))
+                 yield history, "", ""
+                 return
+
+             for partial_response in innovative_chatbot.process_stage_input_stream(current_stage, message, model, system_prompt, thinking_budget):
+                 chat_history, form_data = partial_response
+                 history.append((message, chat_history[-1][1]))
+                 yield history, form_data.get(current_stage, ""), ""
+
+             # Update the database with the new form data
+             db = SessionLocal()
+             idea = db.query(InnovativeIdea).first()
+             for key, value in form_data.items():
+                 setattr(idea, key, value)
+             db.commit()
+             db.close()
+
          except Exception as e:
+             logging.error(f"An error occurred in chatbot_function: {str(e)}", exc_info=True)
+             yield history + [(None, f"An error occurred: {str(e)}")], "", ""
+
+     def fill_form(stage, model, thinking_budget):
+         form_data = innovative_chatbot.fill_out_form(stage, model, thinking_budget)
+         return [form_data.get(stage["field"], "") for stage in STAGES]
+
+     def clear_chat():
+         # Reset the database to an empty form
+         db = SessionLocal()
+         idea = db.query(InnovativeIdea).first()
+         empty_form = IdeaForm()
+         for key, value in empty_form.dict().items():
+             setattr(idea, key, value)
+         db.commit()
+         db.close()
+
+         chat_history, form_data = innovative_chatbot.reset()
+         return chat_history, *[form_data.get(stage["field"], "") for stage in STAGES]
+
+     def start_over():
+         chat_history, form_data, initial_stage = innovative_chatbot.start_over()
+         return (
+             chat_history,  # Update the chatbot with the new chat history
+             "",  # Clear the message input
+             *[form_data.get(stage["field"], "") for stage in STAGES],  # Reset all form fields
+             gr.update(value=initial_stage)  # Reset the stage selection
          )

+     with gr.Blocks(theme=gr.themes.Soft()) as demo:
+         gr.Markdown("# Innovative Idea Generator")
+
+         mode = gr.Radio(["Chatbot", "Direct Input"], label="Mode", value="Chatbot")
+
+         with gr.Row():
+             with gr.Column(scale=2):
+                 chatbot = gr.Chatbot(label="Conversation", height=500)
+                 msg = gr.Textbox(label="Your input", placeholder="Type your brilliant idea here...")
+
+                 with gr.Row():
+                     submit = gr.Button("Submit")
+                     clear = gr.Button("Clear Chat")
+                     start_over_btn = gr.Button("Start Over")
+
+             with gr.Column(scale=1):
+                 stages = gr.Radio(
+                     choices=[stage["name"] for stage in STAGES],
+                     label="Ideation Stages",
+                     value=default_stage
+                 )
+                 form_fields = {
+                     stage["name"]: gr.Textbox(
+                         label=stage["question"],
+                         placeholder=stage["example"],
+                         value=getattr(initial_idea, stage["field"], ""),
+                         visible=(stage["name"] == default_stage),
+                         interactive=False
+                     ) for stage in STAGES
+                 }
+                 fill_form_btn = gr.Button("Fill out Form")
+                 submit_form_btn = gr.Button("Submit Form", visible=False)
+
+         with gr.Accordion("Advanced Settings", open=False):
+             model = gr.Dropdown(choices=MODELS, label="Select Model", value=MODELS[0])
+             system_prompt = gr.Textbox(label="System Prompt", value=DEFAULT_SYSTEM_PROMPT, lines=5)
+             thinking_budget = gr.Slider(minimum=1, maximum=4098, value=2048, step=1, label="Max New Tokens")
+             api_key = gr.Textbox(label="Hugging Face API Key", type="password")
+
+         # Event handlers
+         msg.submit(chatbot_function,
+                    inputs=[msg, chatbot, model, system_prompt, thinking_budget, stages],
+                    outputs=[chatbot, form_fields[default_stage], msg])
+
+         submit.click(chatbot_function,
+                      inputs=[msg, chatbot, model, system_prompt, thinking_budget, stages],
+                      outputs=[chatbot, form_fields[default_stage], msg])
+
+         fill_form_btn.click(fill_form,
+                             inputs=[stages, model, thinking_budget],
+                             outputs=list(form_fields.values()))
+
+         clear.click(clear_chat,
+                     outputs=[chatbot] + list(form_fields.values()))
+
+         # Update form field visibility based on selected stage
+         stages.change(
+             lambda s: [gr.update(visible=(stage["name"] == s)) for stage in STAGES],
+             inputs=[stages],
+             outputs=list(form_fields.values())
          )

+         # Update API key when changed
+         api_key.change(innovative_chatbot.set_api_key, inputs=[api_key])

+         # Toggle between chatbot and direct input mode
+         def toggle_mode(new_mode):
+             if new_mode == "Direct Input":
+                 return [gr.update(visible=False)] * 3 + [gr.update(interactive=True)] * len(STAGES) + [gr.update(visible=True)]
              else:
+                 return [gr.update(visible=True)] * 3 + [gr.update(interactive=False)] * len(STAGES) + [gr.update(visible=False)]

+         mode.change(
+             toggle_mode,
+             inputs=[mode],
+             outputs=[chatbot, msg, submit] + list(form_fields.values()) + [submit_form_btn]
          )

+         # Handle direct form submission
+         submit_form_btn.click(
+             lambda *values: values,
+             inputs=[form_fields[stage["name"]] for stage in STAGES],
+             outputs=[form_fields[stage["name"]] for stage in STAGES]
+         )

+         # Add this new event handler for the Start Over button
+         start_over_btn.click(
+             start_over,
+             outputs=[chatbot, msg] + [form_fields[stage["name"]] for stage in STAGES] + [stages]
+         )

+         # Add this new event handler to display the initial greeting when the interface loads
+         demo.load(lambda: ([[None, innovative_chatbot.get_initial_greeting()]], ""),
+                   outputs=[chatbot, msg])

+         # Add this new event handler to update form fields when they change
+         for stage in STAGES:
+             form_fields[stage["name"]].change(
+                 lambda value, s=stage["name"]: innovative_chatbot.update_form_field(s, value),
+                 inputs=[form_fields[stage["name"]]],
+                 outputs=[form_fields[stage["name"]]]
+             )
+
+     return demo
+
+ def main():
+     try:
+         demo = create_gradio_interface()
+         return demo
+     except ImportError as e:
+         logging.error(f"Import error: {str(e)}", exc_info=True)
+         print(f"An import error occurred: {str(e)}")
+         print("Please check your import statements and ensure there are no circular dependencies.")
+         return None
+     except Exception as e:
+         logging.error(f"Failed to initialize application: {str(e)}", exc_info=True)
+         print(f"An unexpected error occurred: {str(e)}")
+         print("Please check the log file for more details.")
+         return None

  if __name__ == "__main__":
+     try:
+         demo = main()
+         if demo:
+             demo.launch()
+     except Exception as e:
+         logging.error(f"Failed to start the application: {str(e)}", exc_info=True)
+         print(f"An error occurred while starting the application: {str(e)}")
+         print("Please check the log file for more details.")
+         # You might want to add a more user-friendly error message or UI here