document.addEventListener('DOMContentLoaded', () => {
  // DOM Elements
  const chatBox = document.getElementById('chat-box');
  const textInput = document.getElementById('text-input');
  const sendBtn = document.getElementById('send-btn');
  const micBtn = document.getElementById('mic-btn');
  const loadingIndicator = document.getElementById('loading-indicator');
  const statusIndicator = document.getElementById('status-indicator');
  // Mode Buttons
  const modeTextBtn = document.getElementById('mode-text');
  const modeVoiceBtn = document.getElementById('mode-voice');
  const modeVideoBtn = document.getElementById('mode-video');
  // Voice Controls
  const voiceControls = document.getElementById('voice-controls');
  const continuousToggle = document.getElementById('continuous-toggle');
  const rateSlider = document.getElementById('rate');
  const pitchSlider = document.getElementById('pitch');
  // Video Elements
  const videoFeed = document.getElementById('video-feed');
  const canvas = document.getElementById('canvas');
  const imageModal = document.getElementById('image-capture-modal');
  const closeModalBtn = document.getElementById('close-modal-btn');
  // Upload Elements (optional - only used if file upload is enabled in the markup)
  const fileUploadBtn = document.getElementById('file-upload-btn');
  const fileInput = document.getElementById('file-input');
  // State Variables
  let sessionId = null;
  let currentMode = 'text';
  let isListening = false;
  let isContinuousMode = false;
  let videoStream = null;
  let systemStatus = 'disconnected';
  // API base URL - points to the hosted Hugging Face Space backend
  const API_BASE_URL = 'https://nitinbot001-medbot-backend.hf.space';
  // Speech Recognition (STT) Setup
  const SpeechRecognition = window.SpeechRecognition || window.webkitSpeechRecognition;
  let recognition;
  if (SpeechRecognition) {
    recognition = new SpeechRecognition();
    recognition.continuous = false;
    recognition.interimResults = false;
    recognition.lang = 'en-US';
  } else {
    console.warn('Speech Recognition not supported');
    if (micBtn) micBtn.disabled = true;
  }
  // Speech Synthesis (TTS) Setup
  const synth = window.speechSynthesis;
  // --- INITIALIZATION ---
  function initializeApp() {
    console.log('MediBot initializing...');
    checkSystemHealth();
    loadHistory();
    setupEventListeners();
    updateStatusIndicator('ready', 'Ready');
    addMessageToUI('ai', 'Hello! I am your MediBot Assistant. I can help you with medical information, analyze images of medicines, and answer questions about diseases. How can I assist you today?');
    console.log('MediBot initialization complete');
  }
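  // Check the backend /health endpoint and reflect the connection state in the status indicator.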
  async function checkSystemHealth() {
    console.log('Checking system health...');
    try {
      const response = await fetch(`${API_BASE_URL}/health`);
      const data = await response.json();
      if (response.ok && data.status.includes('Running')) {
        systemStatus = 'connected';
        updateStatusIndicator('connected', 'System Ready');
        console.log('System health check passed:', data);
        // Log system info only if knowledge files are loaded
        if (data.disease_fact_sheets > 0 || data.medicine_knowledge_files > 0) {
          console.log(`System loaded with ${data.disease_fact_sheets} disease fact sheets and ${data.medicine_knowledge_files} medicine knowledge files`);
        }
      } else {
        throw new Error('System health check failed');
      }
    } catch (error) {
      systemStatus = 'disconnected';
      updateStatusIndicator('error', 'System Offline');
      console.error('System health check failed:', error);
      addMessageToUI('error', 'Unable to connect to the medical system. Please make sure the backend server is running and reachable.');
    }
  }
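  // Request a fresh session ID from the backend; the app opens a new session for every query.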
  async function startNewSession() {
    if (systemStatus !== 'connected') {
      console.error('Cannot start session - system not connected');
      addMessageToUI('error', 'System is not connected. Please refresh the page and try again.');
      return false;
    }
    console.log('Starting new session...');
    updateStatusIndicator('connecting', 'Starting session...');
    try {
      const response = await fetch(`${API_BASE_URL}/start_session`, {
        method: 'POST',
        headers: {
          'Content-Type': 'application/json'
        }
      });
      if (!response.ok) {
        const errorData = await response.json();
        throw new Error(errorData.error || 'Failed to start session');
      }
      const data = await response.json();
      sessionId = data.session_id;
      updateStatusIndicator('connected', 'Session Active');
      console.log('New session started:', sessionId);
      return true;
    } catch (error) {
      console.error('Session start error:', error);
      sessionId = null;
      updateStatusIndicator('error', 'Session Failed');
      addMessageToUI('error', `Could not start session: ${error.message}`);
      return false;
    }
  }
  // --- EVENT LISTENERS ---
  function setupEventListeners() {
    console.log('Setting up event listeners...');
    // Text input handling
    if (sendBtn) sendBtn.addEventListener('click', handleTextInput);
    if (textInput) {
      textInput.addEventListener('keydown', (e) => {
        if (e.key === 'Enter' && !e.shiftKey) {
          e.preventDefault();
          handleTextInput();
        }
      });
    }
    // Mode switching
    if (modeTextBtn) modeTextBtn.addEventListener('click', () => switchMode('text'));
    if (modeVoiceBtn) modeVoiceBtn.addEventListener('click', () => switchMode('voice'));
    if (modeVideoBtn) modeVideoBtn.addEventListener('click', () => switchMode('video'));
    // Voice controls
    if (micBtn) micBtn.addEventListener('click', toggleListening);
    if (continuousToggle) {
      continuousToggle.addEventListener('change', (e) => {
        isContinuousMode = e.target.checked;
        console.log('Continuous mode toggled:', isContinuousMode);
        if (recognition) {
          recognition.continuous = isContinuousMode;
        }
      });
    }
    // File upload (optional)
    if (fileUploadBtn && fileInput) {
      fileUploadBtn.addEventListener('click', () => fileInput.click());
      fileInput.addEventListener('change', handleFileUpload);
    }
    // Speech recognition events
    if (recognition) {
      recognition.onstart = () => {
        console.log('Speech recognition started');
        isListening = true;
        if (micBtn) {
          micBtn.classList.add('listening');
          micBtn.innerHTML = '<i class="fas fa-stop"></i>';
        }
        updateStatusIndicator('listening', 'Listening...');
      };
      recognition.onend = () => {
        console.log('Speech recognition ended');
        isListening = false;
        if (micBtn) {
          micBtn.classList.remove('listening');
          micBtn.innerHTML = '<i class="fas fa-microphone"></i>';
        }
        updateStatusIndicator('connected', 'Session Active');
        if (isContinuousMode && currentMode !== 'text') {
          console.log('Restarting speech recognition in continuous mode');
          setTimeout(() => recognition.start(), 1000);
        }
      };
      recognition.onresult = (event) => {
        const transcript = event.results[event.results.length - 1][0].transcript.trim();
        console.log('Speech recognition result:', transcript);
        if (textInput) textInput.value = transcript;
        if (transcript) processUserQuery(transcript);
      };
      recognition.onerror = (event) => {
        console.error('Speech recognition error:', event.error);
        addMessageToUI('error', `Speech recognition error: ${event.error}`);
        updateStatusIndicator('error', 'Speech Error');
      };
    }
    // Image capture modal
    if (closeModalBtn) {
      closeModalBtn.addEventListener('click', () => {
        console.log('Image capture modal closed, starting capture...');
        if (imageModal) imageModal.classList.add('hidden');
        setTimeout(captureAndSendImage, 2000);
      });
    }
    // System health check interval
    setInterval(checkSystemHealth, 30000); // Check every 30 seconds
    console.log('Event listeners setup complete');
  }
  // --- CORE LOGIC ---
  function handleTextInput() {
    const query = textInput ? textInput.value.trim() : '';
    if (query) {
      console.log('Processing text input:', query);
      processUserQuery(query);
      if (textInput) textInput.value = '';
    }
  }
  async function handleFileUpload(event) {
    const file = event.target.files[0];
    if (!file) return;
    console.log('File selected for upload:', file.name, file.type, file.size);
    // Validate file type
    const allowedTypes = ['image/jpeg', 'image/jpg', 'image/png', 'image/gif', 'image/bmp', 'image/webp'];
    if (!allowedTypes.includes(file.type)) {
      console.error('Invalid file type:', file.type);
      addMessageToUI('error', 'Please select a valid image file (JPEG, PNG, GIF, BMP, or WebP).');
      return;
    }
    // Check file size (16MB limit)
    if (file.size > 16 * 1024 * 1024) {
      console.error('File size too large:', file.size);
      addMessageToUI('error', 'File size too large. Please select an image under 16MB.');
      return;
    }
    const query = prompt('Please describe what you want to know about this image:');
    if (!query) return;
    console.log('Processing file upload with query:', query);
    await processImageQuery(query, file);
  }
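  // Text pipeline: show the user's message, open a session, then POST the query to /process_query.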
  async function processUserQuery(query) {
    console.log('Processing user query:', query);
    addMessageToUI('user', query);
    showLoading(true);
    // Start a new session for every query
    const sessionStarted = await startNewSession();
    if (!sessionStarted) {
      showLoading(false);
      return;
    }
    try {
      console.log('Sending query to API...');
      const response = await fetch(`${API_BASE_URL}/process_query`, {
        method: 'POST',
        headers: {
          'Content-Type': 'application/json'
        },
        body: JSON.stringify({
          session_id: sessionId,
          query: query
        })
      });
      const data = await response.json();
      console.log('API response received:', data);
      if (!response.ok) {
        throw new Error(data.error || `Server error: ${response.status}`);
      }
      if (data.status === 'image_required') {
        console.log('Image required for query');
        handleImageRequest(data.message, data.category);
      } else if (data.status === 'success') {
        console.log('Query processed successfully');
        handleApiResponse(data.response, data.category);
      } else {
        throw new Error('Unexpected response format');
      }
    } catch (error) {
      console.error('Error processing query:', error);
      handleApiResponse(`Sorry, I encountered an error: ${error.message}`, null, true);
    } finally {
      showLoading(false);
    }
  }
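  // Image pipeline used by file uploads: show the user's message, open a session, then POST the photo to /process_with_image.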
  async function processImageQuery(query, imageFile) {
    console.log('Processing image query:', query, 'with file:', imageFile.name);
    addMessageToUI('user', query);
    showLoading(true);
    const sessionStarted = await startNewSession();
    if (!sessionStarted) {
      showLoading(false);
      return;
    }
    try {
      console.log('Sending image to API...');
      // Note: the query text is only shown in the chat; the request carries the session ID and photo.
      const formData = new FormData();
      formData.append('session_id', sessionId);
      formData.append('photo', imageFile);
      const response = await fetch(`${API_BASE_URL}/process_with_image`, {
        method: 'POST',
        body: formData
      });
      const data = await response.json();
      console.log('Image API response received:', data);
      if (!response.ok) {
        throw new Error(data.error || `Server error: ${response.status}`);
      }
      if (data.status === 'success') {
        console.log('Image processed successfully');
        handleApiResponse(data.response, data.category);
      } else {
        throw new Error('Unexpected response format');
      }
    } catch (error) {
      console.error('Error processing image query:', error);
      handleApiResponse(`Sorry, I couldn't process the image: ${error.message}`, null, true);
    } finally {
      showLoading(false);
    }
  }
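  // Normalize the different response shapes returned by the backend and render the result in the chat.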
  function handleApiResponse(responseData, category, isError = false) {
    let message = '';
    if (isError) {
      message = responseData;
      console.error('API Error:', responseData);
    } else {
      console.log('Handling API response:', responseData, 'Category:', category);
      // Handle different response formats from the integrated backend
      if (typeof responseData === 'string') {
        message = responseData;
      } else if (responseData && responseData.response) {
        message = responseData.response;
      } else if (responseData && responseData.data) {
        message = responseData.data;
      } else {
        message = JSON.stringify(responseData, null, 2);
      }
    }
    const type = isError ? 'error' : 'ai';
    addMessageToUI(type, message, category);
    // Log the category if available
    if (category && !isError) {
      console.log('Response category:', category);
    }
    // Text-to-speech for voice/video modes
    if (!isError && (currentMode === 'voice' || currentMode === 'video')) {
      console.log('Speaking response in', currentMode, 'mode');
      speak(message);
    }
  }
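  // Helper that maps backend category codes to display labels (not referenced elsewhere in this file).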
  function getCategoryInfo(category) {
    const categoryMap = {
      'disease_query': 'Category: General Disease Information',
      'medicine_info': 'Category: Medicine Information & Analysis',
      'skin_disease': 'Category: Skin Condition Analysis',
      'report_reading': 'Category: Medical Report Interpretation'
    };
    return categoryMap[category] || null;
  }
  // --- UI & STATE MANAGEMENT ---
  function updateStatusIndicator(status, message) {
    if (!statusIndicator) return;
    const colors = {
      'ready': '#cccccc',
      'connected': '#76ff03',
      'connecting': '#ffeb3b',
      'listening': '#2196f3',
      'error': '#ff4d4d',
      'disconnected': '#ff9800'
    };
    statusIndicator.textContent = `● ${message}`;
    statusIndicator.style.color = colors[status] || '#cccccc';
    console.log('Status updated:', status, message);
  }
  function switchMode(newMode) {
    if (currentMode === newMode) return;
    console.log('Switching mode from', currentMode, 'to', newMode);
    // Clean up the current mode
    if (currentMode === 'video') stopCamera();
    if (isListening && recognition) recognition.stop();
    currentMode = newMode;
    // Update UI
    document.querySelectorAll('.mode-btn').forEach(btn => btn.classList.remove('active'));
    const newModeBtn = document.getElementById(`mode-${newMode}`);
    if (newModeBtn) newModeBtn.classList.add('active');
    // Mode-specific setup
    switch (newMode) {
      case 'text':
        document.body.classList.remove('body-video-mode');
        if (voiceControls) voiceControls.classList.add('hidden');
        if (micBtn) micBtn.classList.add('hidden');
        if (sendBtn) sendBtn.classList.remove('hidden');
        if (textInput) textInput.classList.remove('hidden');
        if (videoFeed) videoFeed.style.display = 'none';
        break;
      case 'voice':
        document.body.classList.remove('body-video-mode');
        if (voiceControls) voiceControls.classList.remove('hidden');
        if (micBtn) micBtn.classList.remove('hidden');
        if (sendBtn) sendBtn.classList.add('hidden');
        if (textInput) textInput.classList.add('hidden');
        if (videoFeed) videoFeed.style.display = 'none';
        break;
      case 'video':
        document.body.classList.add('body-video-mode');
        if (voiceControls) voiceControls.classList.remove('hidden');
        if (micBtn) micBtn.classList.remove('hidden');
        if (sendBtn) sendBtn.classList.add('hidden');
        if (textInput) textInput.classList.add('hidden');
        if (videoFeed) videoFeed.style.display = 'block';
        startCamera();
        break;
    }
    console.log('Mode switch completed to:', newMode);
  }
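  // Append a chat bubble to the chat box, apply category/system styling, and persist the history.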
  function addMessageToUI(sender, text, category = null) {
    if (!chatBox) return;
    const messageDiv = document.createElement('div');
    messageDiv.classList.add('message', `${sender}-message`);
    // Add category class if provided
    if (category) {
      messageDiv.classList.add(`category-${category}`);
    }
    // Handle different message types
    if (sender === 'system') {
      messageDiv.style.fontStyle = 'italic';
      messageDiv.style.color = '#888';
      messageDiv.style.fontSize = '0.9em';
    }
    messageDiv.textContent = text;
    chatBox.appendChild(messageDiv);
    chatBox.scrollTop = chatBox.scrollHeight;
    saveHistory();
  }
  function showLoading(show) {
    if (loadingIndicator) {
      loadingIndicator.style.display = show ? 'flex' : 'none';
    }
    console.log('Loading indicator:', show ? 'shown' : 'hidden');
  }
  // --- VOICE & VIDEO ---
  function toggleListening() {
    if (!recognition) {
      console.error('Speech recognition not supported');
      addMessageToUI('error', 'Speech recognition is not supported in this browser.');
      return;
    }
    console.log('Toggling listening, current state:', isListening);
    if (isListening) {
      recognition.stop();
    } else {
      recognition.start();
    }
  }
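  // Read a reply aloud with the Web Speech API, preferring a female-sounding voice when one is available.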
  function speak(text) {
    if (!synth || synth.speaking) {
      console.warn('Speech synthesis not available or already speaking');
      return;
    }
    if (text && text.trim() !== '') {
      console.log('Speaking text:', text.substring(0, 50) + '...');
      const utterance = new SpeechSynthesisUtterance(text);
      // Try to find a female-sounding voice by name (the Web Speech API exposes no standard gender field)
      const voices = synth.getVoices();
      const femaleVoice = voices.find(voice =>
        voice.name.toLowerCase().includes('female') ||
        voice.name.toLowerCase().includes('zira') ||
        voice.name.toLowerCase().includes('hazel')
      );
      if (femaleVoice) {
        utterance.voice = femaleVoice;
        console.log('Using voice:', femaleVoice.name);
      }
      // Apply voice settings
      if (pitchSlider) utterance.pitch = parseFloat(pitchSlider.value);
      if (rateSlider) utterance.rate = parseFloat(rateSlider.value);
      utterance.onerror = (event) => {
        console.error('Speech synthesis error:', event.error);
      };
      synth.speak(utterance);
    }
  }
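  // Request webcam access for video mode; on failure, report the error and fall back to voice mode.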
  async function startCamera() {
    console.log('Starting camera...');
    try {
      videoStream = await navigator.mediaDevices.getUserMedia({
        video: { width: 640, height: 480 },
        audio: false
      });
      if (videoFeed) videoFeed.srcObject = videoStream;
      console.log('Camera started successfully');
    } catch (err) {
      console.error("Error accessing camera:", err);
      addMessageToUI('error', 'Could not access the camera. Please grant permission and try again.');
      switchMode('voice');
    }
  }
  function stopCamera() {
    console.log('Stopping camera...');
    if (videoStream) {
      videoStream.getTracks().forEach(track => track.stop());
      if (videoFeed) videoFeed.srcObject = null;
      videoStream = null;
      console.log('Camera stopped');
    }
  }
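  // Called when the backend reports that an image is required to answer the current query.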
  function handleImageRequest(message, category) {
    console.log('Image request received:', message, 'Category:', category);
    addMessageToUI('ai', message);
    if (currentMode !== 'video') {
      addMessageToUI('ai', "Please switch to Video mode to capture an image, or use the file upload option in Text mode.");
    } else {
      if (imageModal) imageModal.classList.remove('hidden');
    }
  }
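  // Grab a frame from the live video feed, encode it as a JPEG blob, and send it to /process_with_image.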
  async function captureAndSendImage() {
    console.log('Capturing image...');
    if (!videoStream || !sessionId) {
      console.error('Cannot capture image - missing video stream or session');
      addMessageToUI('error', 'Cannot capture image. Video stream or session is not active.');
      return;
    }
    if (!videoFeed || !canvas) {
      console.error('Video capture elements not found');
      addMessageToUI('error', 'Video capture elements not found.');
      return;
    }
    const videoTrack = videoStream.getVideoTracks()[0];
    const settings = videoTrack.getSettings();
    canvas.width = settings.width || 640;
    canvas.height = settings.height || 480;
    const context = canvas.getContext('2d');
    context.drawImage(videoFeed, 0, 0, canvas.width, canvas.height);
    canvas.toBlob(async (blob) => {
      if (!blob) {
        console.error('Failed to create image blob');
        addMessageToUI('error', 'Failed to capture image.');
        return;
      }
      console.log('Image captured, sending to server...');
      const formData = new FormData();
      formData.append('session_id', sessionId);
      formData.append('photo', blob, 'capture.jpg');
      showLoading(true);
      try {
        const response = await fetch(`${API_BASE_URL}/process_with_image`, {
          method: 'POST',
          body: formData
        });
        const data = await response.json();
        console.log('Captured image processed:', data);
        if (!response.ok) {
          throw new Error(data.error || 'Image processing failed');
        }
        if (data.status === 'success') {
          handleApiResponse(data.response, data.category);
        } else {
          throw new Error('Unexpected response format');
        }
      } catch (error) {
        console.error('Error processing captured image:', error);
        handleApiResponse(`Sorry, I couldn't process the captured image: ${error.message}`, null, true);
      } finally {
        showLoading(false);
      }
    }, 'image/jpeg', 0.8);
  }
  // --- LOCAL STORAGE ---
  function saveHistory() {
    if (chatBox) {
      try {
        localStorage.setItem('medibotChatHistory', chatBox.innerHTML);
      } catch (error) {
        console.warn('Could not save chat history:', error);
      }
    }
  }
  function loadHistory() {
    try {
      const history = localStorage.getItem('medibotChatHistory');
      if (history && chatBox) {
        chatBox.innerHTML = history;
        chatBox.scrollTop = chatBox.scrollHeight;
        console.log('Chat history loaded');
      }
    } catch (error) {
      console.warn('Could not load chat history:', error);
    }
  }
  function clearHistory() {
    if (chatBox) {
      chatBox.innerHTML = '';
      localStorage.removeItem('medibotChatHistory');
      console.log('Chat history cleared');
    }
  }
  // --- UTILITY FUNCTIONS ---
  function downloadKnowledgeBase() {
    // Placeholder: knowledge base download is not implemented yet
    console.log('Knowledge base download requested');
  }
  // --- EXPOSE FUNCTIONS TO GLOBAL SCOPE (for debugging) ---
  window.medibotDebug = {
    clearHistory,
    checkSystemHealth,
    switchMode,
    getCurrentMode: () => currentMode,
    getSessionId: () => sessionId,
    getSystemStatus: () => systemStatus
  };
  // --- START THE APP ---
  initializeApp();
});