Emmanuel Frimpong Asante committed
Commit fb975db · 1 Parent(s): 44a781d

update space

Files changed (1)
  1. app.py +140 -108
app.py CHANGED
@@ -6,8 +6,9 @@ import logging
 import dotenv
 import gradio as gr
 import numpy as np
-from pymongo import MongoClient
+from pymongo import MongoClient, errors
 from datetime import datetime
+from werkzeug.security import generate_password_hash, check_password_hash
 from utils import PoultryFarmBot, llama3_response
 from transformers import AutoModelForCausalLM, AutoTokenizer
 
@@ -20,26 +21,41 @@ logger = logging.getLogger(__name__)
 
 # MongoDB Setup for logging and audit
 MONGO_URI = os.getenv("MONGO_URI")
-logger.info("Connecting to MongoDB.")
-client = MongoClient(MONGO_URI)
-db = client.poultry_farm  # Connect to the 'poultry_farm' database
-enquiries_collection = db.enquiries  # Collection to store farmer enquiries
-users_collection = db.users  # Collection to store user credentials
-logs_collection = db.logs  # Collection to store application logs
+if not MONGO_URI:
+    logger.error("MONGO_URI is not set in the environment variables.")
+    raise ValueError("MONGO_URI environment variable is required but not set.")
+
+try:
+    logger.info("Connecting to MongoDB.")
+    client = MongoClient(MONGO_URI, serverSelectionTimeoutMS=5000)  # Timeout after 5 seconds
+    client.server_info()  # Trigger exception if cannot connect
+    db = client.poultry_farm  # Connect to the 'poultry_farm' database
+    enquiries_collection = db.enquiries  # Collection to store farmer enquiries
+    users_collection = db.users  # Collection to store user credentials
+    logs_collection = db.logs  # Collection to store application logs
+except errors.ServerSelectionTimeoutError as e:
+    logger.error(f"Failed to connect to MongoDB: {e}")
+    raise ConnectionError("Could not connect to MongoDB. Please check the MONGO_URI and ensure the database is running.")
 
 def log_to_db(level, message):
-    log_entry = {
-        "level": level,
-        "message": message,
-        "timestamp": datetime.utcnow()
-    }
-    logs_collection.insert_one(log_entry)
+    try:
+        log_entry = {
+            "level": level,
+            "message": message,
+            "timestamp": datetime.utcnow()
+        }
+        logs_collection.insert_one(log_entry)
+    except Exception as e:
+        logger.error(f"Failed to log to database: {e}")
 
 # Override logger methods to also log to MongoDB
 class MongoHandler(logging.Handler):
     def emit(self, record):
         log_entry = self.format(record)
-        log_to_db(record.levelname, log_entry)
+        try:
+            log_to_db(record.levelname, log_entry)
+        except Exception as e:
+            logger.error(f"Failed to emit log to MongoDB: {e}")
 
 mongo_handler = MongoHandler()
 mongo_handler.setLevel(logging.INFO)
@@ -59,17 +75,21 @@ else:
     logger.info("Initializing PoultryFarmBot instance.")
     bot = PoultryFarmBot(db)
 
-# Load Llama 3.1 model and tokenizer for text generation
-logger.info("Loading Llama 3.1 model and tokenizer.")
-model_name = "meta-llama/Llama-2-7b-hf"
-tokenizer = AutoTokenizer.from_pretrained(model_name)
-model = AutoModelForCausalLM.from_pretrained(model_name)
-
-# Set the padding token to EOS token or add a new padding token
-if tokenizer.pad_token is None:
-    logger.info("Adding padding token to tokenizer.")
-    tokenizer.add_special_tokens({'pad_token': '[PAD]'})
-    model.resize_token_embeddings(len(tokenizer))
+# Load Llama 3.2 model and tokenizer for text generation
+try:
+    logger.info("Loading Llama 3.2 model and tokenizer.")
+    model_name = "meta-llama/Llama-3.2-3B"
+    tokenizer = AutoTokenizer.from_pretrained(model_name)
+    model = AutoModelForCausalLM.from_pretrained(model_name)
+
+    # Set the padding token to EOS token or add a new padding token
+    if tokenizer.pad_token is None:
+        logger.info("Adding padding token to tokenizer.")
+        tokenizer.add_special_tokens({'pad_token': '[PAD]'})
+        model.resize_token_embeddings(len(tokenizer))
+except Exception as e:
+    logger.error(f"Failed to load Llama 3.2 model or tokenizer: {e}")
+    raise RuntimeError("Could not load the Llama 3.2 model or tokenizer. Please check the configuration.")
 
 def chatbot_response(image, text, username, password):
     """
@@ -84,30 +104,34 @@ def chatbot_response(image, text, username, password):
     Returns:
         str: Response generated by the chatbot.
     """
-    user = bot.authenticate_user(username, password)
-    if not user:
-        return "Authentication failed. Please check your username and password."
-
-    user_id = user['_id']
-
-    # If an image is provided, diagnose the disease
-    if image is not None:
-        logger.info("Image input detected. Proceeding with disease diagnosis.")
-        diagnosis, name, status, recom = bot.diagnose_disease(image)
-        if name and status and recom:
-            logger.info("Diagnosis complete.")
-            bot.log_enquiry("image", "Image Enquiry", diagnosis, user_id)
-            return diagnosis
+    try:
+        user = bot.authenticate_user(username, password)
+        if not user:
+            return "Authentication failed. Please check your username and password."
+
+        user_id = user['_id']
+
+        # If an image is provided, diagnose the disease
+        if image is not None:
+            logger.info("Image input detected. Proceeding with disease diagnosis.")
+            diagnosis, name, status, recom = bot.diagnose_disease(image)
+            if name and status and recom:
+                logger.info("Diagnosis complete.")
+                bot.log_enquiry("image", "Image Enquiry", diagnosis, user_id)
+                return diagnosis
+            else:
+                logger.warning("Diagnosis incomplete.")
+                bot.log_enquiry("image", "Image Enquiry", diagnosis, user_id)
+                return diagnosis
         else:
-            logger.warning("Diagnosis incomplete.")
-            bot.log_enquiry("image", "Image Enquiry", diagnosis, user_id)
-            return diagnosis
-    else:
-        # Generate a response using Llama 3.2 for general text input
-        logger.info("Text input detected. Generating response.")
-        response = llama3_response(text, tokenizer, model)
-        bot.log_enquiry("text", text, response, user_id)
-        return response
+            # Generate a response using Llama 3.2 for general text input
+            logger.info("Text input detected. Generating response.")
+            response = llama3_response(text, tokenizer, model)
+            bot.log_enquiry("text", text, response, user_id)
+            return response
+    except Exception as e:
+        logger.error(f"Error during chatbot response generation: {e}")
+        return "An error occurred while processing your request. Please try again later."
 
 # Gradio interface
 def build_gradio_interface():
@@ -117,66 +141,74 @@ def build_gradio_interface():
     Returns:
         gr.Blocks: Gradio Blocks object representing the chatbot interface.
     """
-    logger.info("Building Gradio interface.")
-    with gr.Blocks(theme=gr.themes.Base()):
-        gr.Markdown("# 🐔 Poultry Management Chatbot")
-        gr.Markdown("Welcome! This chatbot helps you manage your poultry with ease. You can upload an image for disease diagnosis or ask any questions about poultry management.")
-
-        chat_history = gr.Chatbot()
-        with gr.Row():
-            with gr.Column(scale=1):
-                fecal_image = gr.Image(
-                    label="Upload Image of Poultry Feces (Optional)",
-                    type="numpy",
-                    elem_id="image-upload",
-                    show_label=True,
-                )
-            with gr.Column(scale=2):
-                user_input = gr.Textbox(
-                    label="Ask a question",
-                    placeholder="Ask about poultry management...",
-                    lines=3,
-                    elem_id="user-input",
-                )
-                username = gr.Textbox(
-                    label="Username",
-                    placeholder="Enter your username",
-                    lines=1,
-                    elem_id="username-input",
-                )
-                password = gr.Textbox(
-                    label="Password",
-                    placeholder="Enter your password",
-                    type="password",
-                    lines=1,
-                    elem_id="password-input",
-                )
-
-        output_box = gr.Textbox(
-            label="Response",
-            placeholder="Response will appear here...",
-            interactive=False,
-            lines=10,
-            elem_id="output-box",
-        )
-
-        submit_button = gr.Button(
-            "Submit",
-            variant="primary",
-            elem_id="submit-button"
-        )
-        # Connect the submit button to the chatbot response function
-        submit_button.click(
-            fn=chatbot_response,
-            inputs=[fecal_image, user_input, username, password],
-            outputs=[output_box]
-        )
-    logger.info("Gradio interface built successfully.")
-    return chatbot_interface
+    try:
+        logger.info("Building Gradio interface.")
+        with gr.Blocks(theme=gr.themes.Base()):
+            gr.Markdown("# 🐔 Poultry Management Chatbot")
+            gr.Markdown("Welcome! This chatbot helps you manage your poultry with ease. You can upload an image for disease diagnosis or ask any questions about poultry management.")
+
+            chat_history = gr.Chatbot()
+            with gr.Row():
+                with gr.Column(scale=1):
+                    fecal_image = gr.Image(
+                        label="Upload Image of Poultry Feces (Optional)",
+                        type="numpy",
+                        elem_id="image-upload",
+                        show_label=True,
+                    )
+                with gr.Column(scale=2):
+                    user_input = gr.Textbox(
+                        label="Ask a question",
+                        placeholder="Ask about poultry management...",
+                        lines=3,
+                        elem_id="user-input",
+                    )
+                    username = gr.Textbox(
+                        label="Username",
+                        placeholder="Enter your username",
+                        lines=1,
+                        elem_id="username-input",
+                    )
+                    password = gr.Textbox(
+                        label="Password",
+                        placeholder="Enter your password",
+                        type="password",
+                        lines=1,
+                        elem_id="password-input",
+                    )
+
+            output_box = gr.Textbox(
+                label="Response",
+                placeholder="Response will appear here...",
+                interactive=False,
+                lines=10,
+                elem_id="output-box",
+            )
+
+            submit_button = gr.Button(
+                "Submit",
+                variant="primary",
+                elem_id="submit-button"
+            )
+            # Connect the submit button to the chatbot response function
+            submit_button.click(
+                fn=chatbot_response,
+                inputs=[fecal_image, user_input, username, password],
+                outputs=[output_box]
+            )
+        logger.info("Gradio interface built successfully.")
+        return chatbot_interface
+    except Exception as e:
+        logger.error(f"Error building Gradio interface: {e}")
+        raise RuntimeError("Could not build the Gradio interface. Please check the configuration.")
 
 # Launch the Gradio interface
 if __name__ == "__main__":
-    logger.info("Launching Gradio interface.")
-    interface = build_gradio_interface()
-    # Launch the interface with queuing enabled for concurrent requests
-    interface.queue().launch(debug=True, share=True)
+    try:
+        logger.info("Launching Gradio interface.")
+        interface = build_gradio_interface()
+        # Launch the interface with queuing enabled for concurrent requests
+        interface.queue().launch(debug=True, share=True)
+    except Exception as e:
+        logger.error(f"Failed to launch Gradio interface: {e}")
+        raise RuntimeError("Could not launch the Gradio interface. Please check the application setup.")
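For anyone reproducing the Space locally after this commit: the updated app.py now fails fast unless a MONGO_URI environment variable is set and the MongoDB server answers within the 5-second server-selection timeout, and it needs access to the meta-llama/Llama-3.2-3B weights on the Hugging Face Hub. The sketch below mirrors those startup checks; the file name preflight_check.py and the .env usage are illustrative assumptions, not part of the commit.

# preflight_check.py: hypothetical helper, not part of this commit.
# Checks the two startup requirements the new app.py enforces:
# a MONGO_URI environment variable and a reachable MongoDB server.
import os

import dotenv
from pymongo import MongoClient, errors

dotenv.load_dotenv()  # pick up MONGO_URI from a local .env file, if present

mongo_uri = os.getenv("MONGO_URI")
if not mongo_uri:
    raise SystemExit("Set MONGO_URI (for example in .env) before running app.py")

try:
    # Same connectivity probe app.py performs at startup, with the same 5 s timeout.
    client = MongoClient(mongo_uri, serverSelectionTimeoutMS=5000)
    client.server_info()
    print("MongoDB reachable; app.py should pass its startup checks.")
except errors.ServerSelectionTimeoutError as exc:
    raise SystemExit(f"MongoDB not reachable: {exc}")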