// script.js
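// A minimal sketch of the markup this script assumes, inferred from the
// getElementById/querySelector calls below (the repo's actual index.html
// may differ):
//
//   <div class="container">
//     <video id="video" width="720" height="560" autoplay muted></video>
//   </div>
//   <div id="loading-message"></div>
//   <div id="error-message"></div>
//   <div id="status-message"></div>
//   <script defer src="face-api.min.js"></script>
//   <script defer src="script.js"></script>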
const video = document.getElementById('video');
const loadingMessage = document.getElementById('loading-message');
const errorMessage = document.getElementById('error-message');
const statusMessage = document.getElementById('status-message');
let canvas;
let lastExpression = '';
let isProcessing = false;
// Define emojis and prompts for each emotion with more natural responses
const expressionsToEmojiAndPrompt = {
happy: {
        emoji: '😊',
responses: [
"Your smile is contagious! How can I make your day even better?",
"It's wonderful to see you happy! What's bringing you joy today?",
"That's a beautiful smile! Let's keep that positive energy going!"
]
},
sad: {
        emoji: '😢',
responses: [
"I notice you seem down. Would you like to talk about what's bothering you?",
"Sometimes we all need a moment to feel our emotions. I'm here to listen.",
"Remember that difficult moments are temporary. How can I support you?"
]
},
angry: {
        emoji: '😠',
responses: [
"I can see you're frustrated. Let's take a deep breath together.",
"Sometimes anger tells us something important. Would you like to discuss it?",
"I understand you're upset. How can we work through this together?"
]
},
neutral: {
        emoji: '😐',
responses: [
"How are you feeling today? I'm here to chat about anything.",
"Sometimes a neutral moment is good for reflection. What's on your mind?",
"Is there something specific you'd like to discuss?"
]
},
disgusted: {
        emoji: '🤢',
responses: [
"Something seems to be bothering you. Would you like to talk about it?",
"Let's focus on making this situation better. What would help?",
"I notice your discomfort. How can we improve things?"
]
},
surprised: {
        emoji: '😮',
responses: [
"Oh! What caught you by surprise? I'd love to hear about it!",
"Unexpected moments can be exciting! Want to share what surprised you?",
"That's quite a reaction! What happened?"
]
},
fearful: {
        emoji: '😨',
responses: [
"You're safe here. Would you like to talk about what's concerning you?",
"I understand feeling scared. Let's work through this together.",
"Sometimes sharing our fears makes them less overwhelming. I'm here to listen."
]
}
};
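// Note: the seven keys above cover exactly the expression labels that
// face-api.js's faceExpressionNet reports (neutral, happy, sad, angry,
// fearful, disgusted, surprised), so every top expression found in
// processExpressions() has a matching entry here.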
// Load face detection models with better error handling
async function loadModels() {
loadingMessage.style.display = 'block';
try {
const MODEL_URL = '/models';
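        // Assumes the face-api.js weight files are served from /models;
        // adjust MODEL_URL if they are hosted elsewhere (e.g. a CDN).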
await Promise.all([
faceapi.nets.tinyFaceDetector.loadFromUri(MODEL_URL),
faceapi.nets.faceLandmark68Net.loadFromUri(MODEL_URL),
faceapi.nets.faceRecognitionNet.loadFromUri(MODEL_URL),
faceapi.nets.faceExpressionNet.loadFromUri(MODEL_URL)
]);
loadingMessage.style.display = 'none';
updateStatus('Models loaded successfully!', 'success');
await startVideo();
} catch (err) {
handleError('Error loading models: ' + err.message);
}
}
// Enhanced video stream initialization
async function startVideo() {
try {
const stream = await navigator.mediaDevices.getUserMedia({
video: {
width: { ideal: 720 },
height: { ideal: 560 },
facingMode: 'user'
}
});
video.srcObject = stream;
updateStatus('Camera initialized successfully!', 'success');
} catch (err) {
handleError('Camera access denied: ' + err.message);
}
}
// Stop video stream
function stopVideo() {
const stream = video.srcObject;
if (stream) {
const tracks = stream.getTracks();
tracks.forEach(track => track.stop());
video.srcObject = null;
}
}
// Create canvas with proper positioning
function createCanvas() {
if (!canvas) {
canvas = faceapi.createCanvasFromMedia(video);
canvas.style.position = 'absolute';
canvas.style.top = '0';
canvas.style.left = '0';
document.querySelector('.container').append(canvas);
}
}
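// Note: the absolute positioning above only overlays the canvas on the video
// if .container is itself a positioned element (e.g. position: relative in
// the stylesheet); otherwise the canvas anchors to the nearest positioned
// ancestor or the page itself.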
// Enhanced face detection with debouncing
async function detectFaces(interval = 500) {
if (!video || !canvas || isProcessing) return;
isProcessing = true;
const displaySize = { width: video.width, height: video.height };
faceapi.matchDimensions(canvas, displaySize);
try {
const detections = await faceapi.detectAllFaces(video,
new faceapi.TinyFaceDetectorOptions())
.withFaceLandmarks()
.withFaceExpressions();
        if (detections.length === 0) {
            // Clear stale overlays before bailing out; `finally` below still
            // reschedules the next pass
            canvas.getContext('2d').clearRect(0, 0, canvas.width, canvas.height);
            updateStatus('No face detected', 'warning');
            return;
        }
const resizedDetections = faceapi.resizeResults(detections, displaySize);
drawDetections(resizedDetections);
processExpressions(resizedDetections);
} catch (err) {
handleError('Detection error: ' + err.message);
} finally {
isProcessing = false;
setTimeout(() => detectFaces(interval), interval);
}
}
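// Design note: rescheduling inside `finally` keeps the detection loop alive
// even when a pass throws or hits the no-face early return, while the
// isProcessing flag drops any overlapping call that arrives mid-pass.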
// Draw detections with improved visualization
function drawDetections(resizedDetections) {
const ctx = canvas.getContext('2d');
ctx.clearRect(0, 0, canvas.width, canvas.height);
    // drawDetections/drawFaceLandmarks take no per-call color options in
    // face-api.js, so draw the boxes via DrawBox to get the green styling
    resizedDetections.forEach(d => {
        new faceapi.draw.DrawBox(d.detection.box, { boxColor: '#00ff00' }).draw(canvas);
    });
    faceapi.draw.drawFaceLandmarks(canvas, resizedDetections);
    faceapi.draw.drawFaceExpressions(canvas, resizedDetections);
}
// Process expressions and generate responses
function processExpressions(detections) {
detections.forEach(detection => {
const expressions = detection.expressions;
const topExpression = Object.keys(expressions)
.reduce((a, b) => expressions[a] > expressions[b] ? a : b);
if (topExpression !== lastExpression) {
lastExpression = topExpression;
const emojiData = expressionsToEmojiAndPrompt[topExpression];
const randomResponse = emojiData.responses[
Math.floor(Math.random() * emojiData.responses.length)
];
sendPromptToOllama(randomResponse);
drawEmoji(detection, emojiData.emoji);
updateStatus(`Detected emotion: ${topExpression}`, 'info');
}
});
}
// Draw emoji with improved positioning
function drawEmoji(detection, emoji) {
const ctx = canvas.getContext('2d');
const { x, y, width } = detection.detection.box;
ctx.font = `${width * 0.2}px Arial`;
ctx.fillStyle = '#ffffff';
ctx.strokeStyle = '#000000';
ctx.lineWidth = 2;
ctx.textAlign = 'center';
ctx.strokeText(emoji, x + width / 2, y - 10);
ctx.fillText(emoji, x + width / 2, y - 10);
}
// Enhanced error handling
function handleError(message) {
console.error(message);
errorMessage.textContent = message;
errorMessage.style.display = 'block';
loadingMessage.style.display = 'none';
// Hide error after 5 seconds
setTimeout(() => {
errorMessage.style.display = 'none';
}, 5000);
}
// Status updates
function updateStatus(message, type) {
statusMessage.textContent = message;
statusMessage.className = `status-message ${type}`;
statusMessage.style.display = 'block';
setTimeout(() => {
statusMessage.style.display = 'none';
}, 3000);
}
// Ollama API integration
async function sendPromptToOllama(prompt) {
try {
// Create a context-aware prompt
const fullPrompt = `You are an AI assistant responding to a user's emotional state.
The user appears to be ${lastExpression}.
Respond naturally and empathetically to this prompt: ${prompt}`;
const response = await fetch('http://localhost:11434/api/generate', {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({
model: "llama2",
prompt: fullPrompt,
stream: false
})
});
if (!response.ok) {
throw new Error(`HTTP error! status: ${response.status}`);
}
const data = await response.json();
if (data && data.response) {
playTextAsSpeech(data.response);
updateStatus('Response received', 'success');
}
} catch (error) {
handleError('Ollama API Error: ' + error.message);
console.error('Full error:', error);
        // Fall back to a canned response if the API call fails; guard against
        // lastExpression not having been set yet
        const fallback = expressionsToEmojiAndPrompt[lastExpression];
        if (fallback) {
            playTextAsSpeech(fallback.responses[0]);
        }
}
}
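// Assumes a local Ollama server is listening on its default port 11434 with
// the llama2 model already pulled (`ollama pull llama2`). If this page is
// served from an origin Ollama does not allow by default, the browser request
// may be rejected until OLLAMA_ORIGINS is configured to permit it.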
// Improved speech synthesis
function playTextAsSpeech(text) {
try {
const speech = new SpeechSynthesisUtterance(text);
speech.rate = 1.0;
speech.pitch = 1.0;
speech.volume = 1.0;
window.speechSynthesis.cancel(); // Cancel any ongoing speech
window.speechSynthesis.speak(speech);
} catch (error) {
handleError('Speech synthesis error: ' + error.message);
}
}
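// Note: some browsers (notably Chrome) refuse speechSynthesis.speak() until
// the user has interacted with the page, so the first spoken response may be
// silent if no click or keypress has happened yet.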
// Initialize
document.addEventListener('DOMContentLoaded', loadModels);
video.addEventListener('play', () => {
createCanvas();
detectFaces(500);
});
// Clean up resources when the page is closed
window.addEventListener('beforeunload', () => {
stopVideo();
});
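// getUserMedia requires a secure context: serve this page over https:// or
// from http://localhost rather than opening it via file://.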