<!--
  video-analysis / index.html
  Lauroscope's picture
  show only actual value for the emotion analysis layer on top of the camera view - Follow Up Deployment
  fe5e751 verified
-->
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Advanced Video Analysis</title>
<script src="https://cdn.tailwindcss.com"></script>
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.5.2/css/all.min.css">
<style>
/* Page styles: camera stage with stacked overlays, emotion/AI result cards,
   meters, recording UI and the camera-selection controls. Utility classes
   (colors, spacing) come from the Tailwind CDN build loaded in the head. */
/* 16:9 letterbox stage; children are absolutely positioned inside it. */
.camera-container {
position: relative;
width: 100%;
padding-bottom: 56.25%; /* 16:9 aspect ratio */
background-color: #111;
border-radius: 12px;
overflow: hidden;
box-shadow: 0 10px 25px rgba(0, 0, 0, 0.3);
}
.camera-view {
position: absolute;
top: 0;
left: 0;
width: 100%;
height: 100%;
object-fit: cover;
}
/* Full-stage layer where JS draws face boxes/labels; clicks pass through. */
.emotion-overlay {
position: absolute;
top: 0;
left: 0;
width: 100%;
height: 100%;
pointer-events: none;
z-index: 1;
}
.face-box {
position: absolute;
border: 3px solid #3b82f6;
border-radius: 8px;
background-color: rgba(59, 130, 246, 0.2);
}
.emotion-label {
position: absolute;
background-color: rgba(0, 0, 0, 0.7);
color: white;
padding: 2px 6px;
border-radius: 4px;
font-size: 12px;
white-space: nowrap;
}
.emotion-meter {
height: 6px;
border-radius: 3px;
background: rgba(255,255,255,0.1);
margin: 4px 0;
}
/* NOTE(review): --tw-gradient-to is a Tailwind-internal variable only set by
   from-*/to-* utility classes on the same element; on fills without those
   classes this gradient is invalid and the inline background-color (set per
   card) takes effect instead - confirm this is the intended fallback. */
.meter-fill {
height: 100%;
border-radius: 3px;
background: linear-gradient(to right, currentColor 0%, var(--tw-gradient-to) 100%);
box-shadow: 0 0 6px currentColor;
transition: width 0.5s ease-out;
}
.change-indicator {
display: inline-block;
width: 12px;
text-align: center;
}
.change-up {
color: #10b981;
}
.change-down {
color: #ef4444;
}
.change-neutral {
color: #9CA3AF;
}
.loading-animation {
animation: pulse 1.5s infinite;
}
/* Shared by the loading hint and the red recording dot. */
@keyframes pulse {
0%, 100% { opacity: 0.5; }
50% { opacity: 1; }
}
.emotion-card {
transition: all 0.3s ease;
}
.emotion-card.active {
transform: scale(1.05);
box-shadow: 0 0 15px rgba(59, 130, 246, 0.5);
}
.recording-indicator {
display: inline-block;
width: 10px;
height: 10px;
border-radius: 50%;
background-color: #ef4444;
margin-right: 6px;
animation: pulse 1.5s infinite;
}
/* Compact live-values panel drawn on top of the camera view (bottom-left). */
.analysis-overlay {
position: absolute;
bottom: 10px;
left: 10px;
background-color: rgba(0, 0, 0, 0.7);
border-radius: 8px;
padding: 6px;
width: 105px;
z-index: 2;
pointer-events: none;
}
.analysis-emotion {
display: flex;
justify-content: space-between;
margin-bottom: 2px;
font-size: 10px;
color: white;
line-height: 1.2;
}
.analysis-emotion-name {
flex: 1;
white-space: nowrap;
overflow: hidden;
text-overflow: ellipsis;
margin-right: 4px;
}
.analysis-emotion-value {
width: 25px;
text-align: right;
}
.analysis-emotion-meter {
height: 3px;
background-color: rgba(255, 255, 255, 0.2);
border-radius: 2px;
margin-top: 1px;
}
.analysis-emotion-fill {
height: 100%;
border-radius: 2px;
background-color: white;
}
.analysis-title {
font-size: 10px;
font-weight: bold;
color: white;
margin-bottom: 4px;
border-bottom: 1px solid rgba(255, 255, 255, 0.2);
padding-bottom: 2px;
}
.upload-progress {
height: 4px;
background-color: rgba(255, 255, 255, 0.2);
border-radius: 2px;
margin-top: 8px;
overflow: hidden;
}
.upload-progress-bar {
height: 100%;
background-color: #3b82f6;
width: 0%;
transition: width 0.3s ease;
}
.download-buttons {
display: flex;
gap: 8px;
justify-content: center;
margin-top: 8px;
}
/* Canvas for recording */
/* Kept invisible but present so its stream can be captured for MediaRecorder. */
#recording-canvas {
position: absolute;
top: 0;
left: 0;
width: 100%;
height: 100%;
z-index: 10;
pointer-events: none;
opacity: 0;
}
/* Fear-specific styling */
.fear-card {
border-left: 4px solid #8b5cf6;
}
.fear-high {
animation: fear-pulse 0.8s infinite;
}
@keyframes fear-pulse {
0%, 100% { box-shadow: 0 0 0 0 rgba(139, 92, 246, 0.4); }
70% { box-shadow: 0 0 0 8px rgba(139, 92, 246, 0); }
}
/* Upload button styling */
/* The invisible file input is stretched over the visual button so clicking
   anywhere on the button opens the file picker. */
.upload-btn {
position: relative;
overflow: hidden;
display: inline-block;
}
.upload-btn input[type="file"] {
position: absolute;
left: 0;
top: 0;
opacity: 0;
width: 100%;
height: 100%;
cursor: pointer;
}
.video-controls {
display: flex;
justify-content: center;
gap: 8px;
margin-top: 8px;
}
.video-time {
color: white;
font-size: 14px;
display: flex;
align-items: center;
}
/* AI Detection styling */
.ai-detection-meter {
height: 8px;
border-radius: 4px;
background: linear-gradient(90deg, #10b981, #3b82f6, #8b5cf6);
margin-top: 4px;
}
.ai-tool-indicator {
display: inline-block;
width: 12px;
height: 12px;
border-radius: 50%;
margin-right: 6px;
}
.ai-tool-item {
display: flex;
align-items: center;
padding: 4px 0;
border-bottom: 1px solid rgba(255,255,255,0.1);
}
.ai-tool-probability {
margin-left: auto;
font-weight: bold;
}
.ai-detection-high {
animation: ai-pulse 1s infinite;
}
@keyframes ai-pulse {
0%, 100% { box-shadow: 0 0 0 0 rgba(239, 68, 68, 0.4); }
70% { box-shadow: 0 0 0 6px rgba(239, 68, 68, 0); }
}
.ai-signature {
font-family: monospace;
background-color: rgba(0,0,0,0.3);
padding: 2px 4px;
border-radius: 4px;
font-size: 12px;
word-break: break-all;
}
/* border-left-color is supplied inline per card to match each tool's accent. */
.ai-tool-card {
transition: all 0.3s ease;
border-left: 4px solid;
}
.ai-tool-card.active {
transform: scale(1.02);
box-shadow: 0 0 10px rgba(59, 130, 246, 0.3);
}
.ai-global-card {
border-left: 4px solid #3b82f6;
}
.ai-high-probability {
animation: ai-highlight 1.5s infinite;
}
@keyframes ai-highlight {
0%, 100% { box-shadow: 0 0 0 0 rgba(239, 68, 68, 0.3); }
50% { box-shadow: 0 0 0 4px rgba(239, 68, 68, 0); }
}
/* Responsive grid for analysis sections */
.analysis-grid {
display: grid;
grid-template-columns: repeat(auto-fit, minmax(300px, 1fr));
gap: 1rem;
margin-top: 1.5rem;
}
@media (min-width: 1024px) {
.analysis-grid {
grid-template-columns: 1fr 1fr;
}
}
/* Camera selection dropdown */
/* Hidden until JS adds .active; shares the top-right corner with the
   toggle button, so only one of the two should be visible at a time. */
.camera-selector {
position: absolute;
top: 10px;
right: 10px;
z-index: 20;
background-color: rgba(0, 0, 0, 0.7);
border-radius: 4px;
padding: 4px;
display: none;
}
.camera-selector.active {
display: block;
}
.camera-selector select {
background-color: #1f2937;
color: white;
border: 1px solid #4b5563;
border-radius: 4px;
padding: 4px 8px;
font-size: 12px;
}
.camera-selector select:focus {
outline: none;
border-color: #3b82f6;
}
.camera-toggle-btn {
position: absolute;
top: 10px;
right: 10px;
z-index: 20;
background-color: rgba(0, 0, 0, 0.7);
border-radius: 4px;
padding: 4px 8px;
color: white;
font-size: 12px;
cursor: pointer;
display: flex;
align-items: center;
gap: 4px;
}
.camera-toggle-btn:hover {
background-color: rgba(0, 0, 0, 0.9);
}
</style>
</head>
<body class="bg-gray-900 min-h-screen flex flex-col items-center justify-start p-4">
<!-- NOTE(review): the <i class="fas fa-*"> icons throughout this page require
     a Font Awesome stylesheet, which is not linked in the head; until one is
     added the icons render as empty glyphs. -->
<div class="w-full max-w-6xl">
<h1 class="text-3xl font-bold text-center text-white mb-2">Advanced Video Analysis</h1>
<p class="text-center text-gray-300 mb-6">Detect emotions and AI-generated content in videos</p>
<!-- Video Container -->
<div class="w-full">
<!-- Stage: live <video> plus stacked overlays (face boxes, live values,
     recording canvas, camera pickers, loading/placeholder screens).
     Visibility of each layer is toggled by the script via the Tailwind
     'hidden' class. -->
<div class="camera-container mb-4">
<video id="camera-view" class="camera-view" autoplay playsinline></video>
<div id="emotion-overlay" class="emotion-overlay"></div>
<!-- Compact per-emotion value panel; rows are injected by the script. -->
<div id="analysis-overlay" class="analysis-overlay hidden">
<div class="analysis-title">Emotion Analysis</div>
<div id="analysis-emotions-container"></div>
</div>
<canvas id="recording-canvas"></canvas>
<!-- Camera toggle button -->
<div id="camera-toggle-btn" class="camera-toggle-btn hidden">
<i class="fas fa-camera"></i>
<span>Switch Camera</span>
</div>
<!-- Camera selection dropdown -->
<!-- Options are rebuilt by the script from enumerateDevices(). -->
<div id="camera-selector" class="camera-selector">
<select id="camera-select">
<option value="">Select Camera</option>
</select>
</div>
<div id="loading-indicator" class="absolute inset-0 flex items-center justify-center bg-black bg-opacity-70">
<div class="text-center">
<div class="inline-block animate-spin rounded-full h-10 w-10 border-t-2 border-b-2 border-blue-500 mb-2"></div>
<p class="text-white">Loading analysis models...</p>
</div>
</div>
<!-- Shown when no source is active: choose live camera or an uploaded file. -->
<div id="upload-placeholder" class="absolute inset-0 flex items-center justify-center bg-black bg-opacity-70 hidden">
<div class="text-center p-6 bg-gray-800 rounded-lg">
<i class="fas fa-video text-blue-500 text-4xl mb-4"></i>
<p class="text-white mb-4">No video source selected</p>
<div class="flex justify-center gap-4">
<button id="use-camera-btn" class="px-4 py-2 bg-blue-600 text-white rounded-lg hover:bg-blue-700 transition">
<i class="fas fa-camera mr-2"></i> Use Camera
</button>
<!-- The transparent file input is stretched over this button (see CSS),
     so the button itself needs no click handler.
     NOTE(review): the input has no <label>; consider an aria-label. -->
<div class="upload-btn">
<button class="px-4 py-2 bg-purple-600 text-white rounded-lg hover:bg-purple-700 transition">
<i class="fas fa-upload mr-2"></i> Upload Video
</button>
<input id="video-upload" type="file" accept="video/*">
</div>
</div>
</div>
</div>
</div>
<!-- Playback controls, only shown for uploaded files (camera has no timeline). -->
<div id="video-controls" class="video-controls hidden">
<button id="play-btn" class="px-4 py-2 bg-green-600 text-white rounded-lg hover:bg-green-700 transition">
<i class="fas fa-play mr-2"></i> Play
</button>
<button id="pause-btn" class="px-4 py-2 bg-yellow-600 text-white rounded-lg hover:bg-yellow-700 transition">
<i class="fas fa-pause mr-2"></i> Pause
</button>
<button id="video-stop-btn" class="px-4 py-2 bg-red-600 text-white rounded-lg hover:bg-red-700 transition">
<i class="fas fa-stop mr-2"></i> Stop
</button>
<div class="video-time">
<span id="current-time">0:00</span> / <span id="duration">0:00</span>
</div>
</div>
<!-- Analysis controls; Record/Stop start hidden until analysis begins. -->
<div class="flex flex-wrap justify-center gap-4 mb-6">
<button id="start-btn" class="px-4 py-2 bg-blue-600 text-white rounded-lg hover:bg-blue-700 transition">
<i class="fas fa-play mr-2"></i> Start Analysis
</button>
<button id="record-btn" class="px-4 py-2 bg-red-600 text-white rounded-lg hover:bg-red-700 transition hidden">
<i class="fas fa-circle mr-2"></i> Record
</button>
<button id="stop-btn" class="px-4 py-2 bg-gray-600 text-white rounded-lg hover:bg-gray-700 transition hidden">
<i class="fas fa-stop mr-2"></i> Stop Analysis
</button>
<button id="switch-source-btn" class="px-4 py-2 bg-purple-600 text-white rounded-lg hover:bg-purple-700 transition">
<i class="fas fa-exchange-alt mr-2"></i> Switch Source
</button>
</div>
<div id="recording-status" class="text-center text-gray-300 mb-4 hidden">
<span class="recording-indicator"></span>
<span>Recording in progress...</span>
</div>
<div id="upload-status" class="text-center text-gray-300 mb-4 hidden">
<div class="flex items-center justify-center">
<div class="animate-spin rounded-full h-4 w-4 border-t-2 border-b-2 border-blue-500 mr-2"></div>
<span>Processing recording...</span>
</div>
<div class="upload-progress">
<div id="upload-progress-bar" class="upload-progress-bar"></div>
</div>
</div>
<div id="download-section" class="text-center hidden">
<div class="bg-gray-800 rounded-lg p-4 mb-4">
<p class="text-gray-300 mb-2">Your recording has been saved:</p>
<div class="download-buttons">
<button id="local-download-btn" class="px-4 py-2 bg-green-600 text-white rounded-lg hover:bg-green-700 transition inline-block">
<i class="fas fa-download mr-2"></i> Download MP4
</button>
</div>
</div>
</div>
</div>
<!-- Analysis Sections Grid -->
<div class="analysis-grid">
<!-- Emotion Analysis Section -->
<!-- Six structurally identical cards (happiness, sadness, anger, fear,
     surprise, neutral). All values ("0%", avg, frame counts, meter widths)
     are placeholders updated live by the script. -->
<div class="bg-gray-800 rounded-lg p-4">
<h2 class="text-xl font-semibold text-white mb-3">Emotion Analysis</h2>
<div id="emotion-results" class="space-y-4">
<div class="emotion-card bg-gray-700 rounded-lg p-3">
<div class="flex justify-between items-start mb-1">
<div class="flex items-center">
<span class="text-xl mr-2">😊</span>
<span class="font-medium text-white">Happiness</span>
</div>
<div class="text-right">
<span class="text-green-400 text-lg font-bold">0%</span>
<div class="flex items-center justify-end text-xs text-gray-300">
<span class="inline-block w-10">Avg: 0%</span>
<span class="inline-block w-10 ml-1">Δ <span class="change-indicator"></span> 0%</span>
</div>
</div>
</div>
<div class="emotion-meter mt-1.5 rounded-full h-2.5 overflow-hidden">
<div class="meter-fill h-full bg-gradient-to-r from-green-400 to-green-600 transition-all duration-300" style="width: 0%"></div>
</div>
<div class="flex justify-between mt-1">
<span class="text-xs text-gray-300">0%</span>
<div class="text-xs text-gray-300 opacity-0">-</div>
<span class="text-xs text-gray-300">100%</span>
</div>
<div class="text-center mt-1 text-xs text-blue-300">
Avg: <span class="avg-value">0%</span> over <span class="frame-count">0</span> frames
</div>
</div>
<div class="emotion-card bg-gray-700 rounded-lg p-3">
<div class="flex justify-between items-start mb-1">
<div class="flex items-center">
<span class="text-xl mr-2">😢</span>
<span class="font-medium text-white">Sadness</span>
</div>
<div class="text-right">
<span class="text-blue-400 text-lg font-bold">0%</span>
<div class="flex items-center justify-end text-xs text-gray-300">
<span class="inline-block w-10">Avg: 0%</span>
<span class="inline-block w-10 ml-1">Δ <span class="change-indicator"></span> 0%</span>
</div>
</div>
</div>
<div class="emotion-meter mt-1.5 rounded-full h-2.5 overflow-hidden">
<div class="meter-fill h-full bg-gradient-to-r from-blue-400 to-blue-600 transition-all duration-300" style="width: 0%"></div>
</div>
<div class="flex justify-between mt-1">
<span class="text-xs text-gray-300">0%</span>
<div class="text-xs text-gray-300 opacity-0">-</div>
<span class="text-xs text-gray-300">100%</span>
</div>
<div class="text-center mt-1 text-xs text-blue-300">
Avg: <span class="avg-value">0%</span> over <span class="frame-count">0</span> frames
</div>
</div>
<div class="emotion-card bg-gray-700 rounded-lg p-3">
<div class="flex justify-between items-start mb-1">
<div class="flex items-center">
<span class="text-xl mr-2">😠</span>
<span class="font-medium text-white">Anger</span>
</div>
<div class="text-right">
<span class="text-red-400 text-lg font-bold">0%</span>
<div class="flex items-center justify-end text-xs text-gray-300">
<span class="inline-block w-10">Avg: 0%</span>
<span class="inline-block w-10 ml-1">Δ <span class="change-indicator"></span> 0%</span>
</div>
</div>
</div>
<div class="emotion-meter mt-1.5 rounded-full h-2.5 overflow-hidden">
<div class="meter-fill h-full bg-gradient-to-r from-red-400 to-red-600 transition-all duration-300" style="width: 0%"></div>
</div>
<div class="flex justify-between mt-1">
<span class="text-xs text-gray-300">0%</span>
<div class="text-xs text-gray-300 opacity-0">-</div>
<span class="text-xs text-gray-300">100%</span>
</div>
<div class="text-center mt-1 text-xs text-red-300">
Avg: <span class="avg-value">0%</span> over <span class="frame-count">0</span> frames
</div>
</div>
<!-- Fear gets an extra accent class (purple left border + pulse at high values). -->
<div class="emotion-card fear-card bg-gray-700 rounded-lg p-3">
<div class="flex justify-between items-start mb-1">
<div class="flex items-center">
<span class="text-xl mr-2">😨</span>
<span class="font-medium text-white">Fear</span>
</div>
<div class="text-right">
<span class="text-purple-400 text-lg font-bold">0%</span>
<div class="flex items-center justify-end text-xs text-gray-300">
<span class="inline-block w-10">Avg: 0%</span>
<span class="inline-block w-10 ml-1">Δ <span class="change-indicator"></span> 0%</span>
</div>
</div>
</div>
<div class="emotion-meter mt-1.5 rounded-full h-2.5 overflow-hidden">
<div class="meter-fill h-full bg-gradient-to-r from-purple-400 to-purple-600 transition-all duration-300" style="width: 0%"></div>
</div>
<div class="flex justify-between mt-1">
<span class="text-xs text-gray-300">0%</span>
<div class="text-xs text-gray-300 opacity-0">-</div>
<span class="text-xs text-gray-300">100%</span>
</div>
<div class="text-center mt-1 text-xs text-purple-300">
Avg: <span class="avg-value">0%</span> over <span class="frame-count">0</span> frames
</div>
</div>
<div class="emotion-card bg-gray-700 rounded-lg p-3">
<div class="flex justify-between items-start mb-1">
<div class="flex items-center">
<span class="text-xl mr-2">😲</span>
<span class="font-medium text-white">Surprise</span>
</div>
<div class="text-right">
<span class="text-yellow-400 text-lg font-bold">0%</span>
<div class="flex items-center justify-end text-xs text-gray-300">
<span class="inline-block w-10">Avg: 0%</span>
<span class="inline-block w-10 ml-1">Δ <span class="change-indicator"></span> 0%</span>
</div>
</div>
</div>
<div class="emotion-meter mt-1.5 rounded-full h-2.5 overflow-hidden">
<div class="meter-fill h-full bg-gradient-to-r from-yellow-400 to-yellow-600 transition-all duration-300" style="width: 0%"></div>
</div>
<div class="flex justify-between mt-1">
<span class="text-xs text-gray-300">0%</span>
<div class="text-xs text-gray-300 opacity-0">-</div>
<span class="text-xs text-gray-300">100%</span>
</div>
<div class="text-center mt-1 text-xs text-yellow-300">
Avg: <span class="avg-value">0%</span> over <span class="frame-count">0</span> frames
</div>
</div>
<div class="emotion-card bg-gray-700 rounded-lg p-3">
<div class="flex justify-between items-start mb-1">
<div class="flex items-center">
<span class="text-xl mr-2">😐</span>
<span class="font-medium text-white">Neutral</span>
</div>
<div class="text-right">
<span class="text-gray-400 text-lg font-bold">0%</span>
<div class="flex items-center justify-end text-xs text-gray-300">
<span class="inline-block w-10">Avg: 0%</span>
<span class="inline-block w-10 ml-1">Δ <span class="change-indicator"></span> 0%</span>
</div>
</div>
</div>
<div class="emotion-meter mt-1.5 rounded-full h-2.5 overflow-hidden">
<div class="meter-fill h-full bg-gradient-to-r from-gray-400 to-gray-600 transition-all duration-300" style="width: 0%"></div>
</div>
<div class="flex justify-between mt-1">
<span class="text-xs text-gray-300">0%</span>
<div class="text-xs text-gray-300 opacity-0">-</div>
<span class="text-xs text-gray-300">100%</span>
</div>
<div class="text-center mt-1 text-xs text-gray-300">
Avg: <span class="avg-value">0%</span> over <span class="frame-count">0</span> frames
</div>
</div>
</div>
</div>
<!-- AI Generation Analysis Section -->
<div class="bg-gray-800 rounded-lg p-4">
<h2 class="text-xl font-semibold text-white mb-3">AI Generation Analysis</h2>
<!-- Global AI Probability Card -->
<div class="ai-tool-card ai-global-card bg-gray-700 rounded-lg p-3 mb-4">
<div class="flex justify-between items-center">
<span class="font-medium text-white">🤖 AI Probability</span>
<span id="ai-probability-value" class="text-blue-400">0%</span>
</div>
<div class="emotion-meter mt-1">
<div id="ai-probability-meter" class="meter-fill" style="width: 0%"></div>
</div>
<!-- NOTE(review): .emotion-legend has no rule in the stylesheet above, so
     these two labels render with default (unstyled) layout. -->
<div class="emotion-legend">
<span>Real</span>
<span>AI-Generated</span>
</div>
</div>
<!-- AI Tools Analysis -->
<!-- One card per tool; the inline border-left-color and fill colors mirror
     the `color` field of the script's aiTools database. -->
<h3 class="font-medium text-white mb-2">Possible AI Tools:</h3>
<div id="ai-tools-results" class="space-y-3">
<div class="ai-tool-card bg-gray-700 rounded-lg p-3" style="border-left-color: #3b82f6;">
<div class="flex justify-between items-center">
<span class="font-medium text-white">💎 Kling AI v2</span>
<span class="text-blue-400">0%</span>
</div>
<div class="emotion-meter mt-1">
<div class="meter-fill" style="width: 0%; background-color: #3b82f6;"></div>
</div>
</div>
<div class="ai-tool-card bg-gray-700 rounded-lg p-3" style="border-left-color: #8b5cf6;">
<div class="flex justify-between items-center">
<span class="font-medium text-white">🎬 Runway Gen-3</span>
<span class="text-purple-400">0%</span>
</div>
<div class="emotion-meter mt-1">
<div class="meter-fill" style="width: 0%; background-color: #8b5cf6;"></div>
</div>
</div>
<div class="ai-tool-card bg-gray-700 rounded-lg p-3" style="border-left-color: #f59e0b;">
<div class="flex justify-between items-center">
<span class="font-medium text-white">🎥 Sora v2</span>
<span class="text-yellow-400">0%</span>
</div>
<div class="emotion-meter mt-1">
<div class="meter-fill" style="width: 0%; background-color: #f59e0b;"></div>
</div>
</div>
<div class="ai-tool-card bg-gray-700 rounded-lg p-3" style="border-left-color: #10b981;">
<div class="flex justify-between items-center">
<span class="font-medium text-white">🌈 Luma Dream Machine</span>
<span class="text-green-400">0%</span>
</div>
<div class="emotion-meter mt-1">
<div class="meter-fill" style="width: 0%; background-color: #10b981;"></div>
</div>
</div>
<div class="ai-tool-card bg-gray-700 rounded-lg p-3" style="border-left-color: #ec4899;">
<div class="flex justify-between items-center">
<span class="font-medium text-white">🎨 Pika 1.5</span>
<span class="text-pink-400">0%</span>
</div>
<div class="emotion-meter mt-1">
<div class="meter-fill" style="width: 0%; background-color: #ec4899;"></div>
</div>
</div>
<div class="ai-tool-card bg-gray-700 rounded-lg p-3" style="border-left-color: #ef4444;">
<div class="flex justify-between items-center">
<span class="font-medium text-white">🔄 Stable Video Diffusion 2.0</span>
<span class="text-red-400">0%</span>
</div>
<div class="emotion-meter mt-1">
<div class="meter-fill" style="width: 0%; background-color: #ef4444;"></div>
</div>
</div>
<div class="ai-tool-card bg-gray-700 rounded-lg p-3" style="border-left-color: #06b6d4;">
<div class="flex justify-between items-center">
<span class="font-medium text-white">🌊 Hailuo v3</span>
<span class="text-cyan-400">0%</span>
</div>
<div class="emotion-meter mt-1">
<div class="meter-fill" style="width: 0%; background-color: #06b6d4;"></div>
</div>
</div>
<div class="ai-tool-card bg-gray-700 rounded-lg p-3" style="border-left-color: #8b5cf6;">
<div class="flex justify-between items-center">
<span class="font-medium text-white">💫 Google Veo</span>
<span class="text-purple-400">0%</span>
</div>
<div class="emotion-meter mt-1">
<div class="meter-fill" style="width: 0%; background-color: #8b5cf6;"></div>
</div>
</div>
<div class="ai-tool-card bg-gray-700 rounded-lg p-3" style="border-left-color: #d946ef;">
<div class="flex justify-between items-center">
<span class="font-medium text-white">🌌 Wan Server v2</span>
<span class="text-fuchsia-400">0%</span>
</div>
<div class="emotion-meter mt-1">
<div class="meter-fill" style="width: 0%; background-color: #d946ef;"></div>
</div>
</div>
</div>
<!-- Detection Indicators -->
<!-- Hidden by default; the script reveals and fills these once it has
     indicator/metadata results. -->
<div id="ai-detection-details" class="mt-4 text-gray-300 text-sm">
<div id="ai-detection-indicators" class="hidden">
<h3 class="font-medium text-white mb-2">Detection Indicators:</h3>
<div class="text-xs space-y-1">
<div class="flex items-start">
<span class="text-xs mr-2"></span>
<span>No AI indicators detected yet</span>
</div>
</div>
</div>
<div id="ai-metadata" class="mt-3 hidden">
<h3 class="font-medium text-white mb-2">Metadata Analysis:</h3>
<div class="text-xs">
<p class="ai-signature">No metadata detected</p>
</div>
</div>
</div>
</div>
</div>
<!-- Analysis Info Section -->
<!-- Runtime stats; every <span id> here is a slot the script writes into. -->
<div class="bg-gray-800 rounded-lg p-4 mt-4">
<h2 class="text-xl font-semibold text-white mb-3">Analysis Info</h2>
<div class="grid grid-cols-2 md:grid-cols-3 gap-4 text-gray-300">
<div>
<p><span class="font-medium">Status:</span> <span id="status-text" class="text-yellow-400">Waiting to start</span></p>
</div>
<div>
<p><span class="font-medium">Source:</span> <span id="source-type">None</span></p>
</div>
<div>
<p><span class="font-medium">Faces detected:</span> <span id="face-count">0</span></p>
</div>
<div>
<p><span class="font-medium">Detection speed:</span> <span id="detection-speed">0</span> ms</p>
</div>
<div>
<p><span class="font-medium">FPS:</span> <span id="fps-counter">0</span></p>
</div>
<div>
<p><span class="font-medium">Recording time:</span> <span id="recording-time">0:00</span></p>
</div>
</div>
</div>
<div class="mt-8 text-center text-gray-400 text-sm">
<p>Advanced video analysis with emotion detection and AI generation identification.</p>
</div>
</div>
<!-- Load TensorFlow.js and face-api.js from CDN -->
<script src="https://cdn.jsdelivr.net/npm/@tensorflow/tfjs@2.0.0/dist/tf.min.js"></script>
<script src="https://cdn.jsdelivr.net/npm/face-api.js@0.22.2/dist/face-api.min.js"></script>
<script>
document.addEventListener('DOMContentLoaded', function() {
// Entire app lives inside this handler; nothing leaks to the global scope.
// Element lookups are done once up front and closed over by every helper.
// DOM Elements
const cameraView = document.getElementById('camera-view');
const emotionOverlay = document.getElementById('emotion-overlay');
const analysisOverlay = document.getElementById('analysis-overlay');
const analysisEmotionsContainer = document.getElementById('analysis-emotions-container');
const startBtn = document.getElementById('start-btn');
const recordBtn = document.getElementById('record-btn');
const stopBtn = document.getElementById('stop-btn');
const loadingIndicator = document.getElementById('loading-indicator');
const statusText = document.getElementById('status-text');
const faceCount = document.getElementById('face-count');
const detectionSpeed = document.getElementById('detection-speed');
const fpsCounter = document.getElementById('fps-counter');
const recordingStatus = document.getElementById('recording-status');
const uploadStatus = document.getElementById('upload-status');
const downloadSection = document.getElementById('download-section');
const localDownloadBtn = document.getElementById('local-download-btn');
const recordingTime = document.getElementById('recording-time');
// Static NodeList: the six emotion cards are fixed in the markup.
const emotionCards = document.querySelectorAll('.emotion-card');
const uploadProgressBar = document.getElementById('upload-progress-bar');
// 2D context of the hidden canvas used for recording composition.
const recordingCanvas = document.getElementById('recording-canvas');
const recordingCtx = recordingCanvas.getContext('2d');
const fearCard = document.querySelector('.fear-card');
// Source selection (camera vs. uploaded file) and playback controls.
const uploadPlaceholder = document.getElementById('upload-placeholder');
const videoUpload = document.getElementById('video-upload');
const useCameraBtn = document.getElementById('use-camera-btn');
const switchSourceBtn = document.getElementById('switch-source-btn');
const videoControls = document.getElementById('video-controls');
const playBtn = document.getElementById('play-btn');
const pauseBtn = document.getElementById('pause-btn');
const videoStopBtn = document.getElementById('video-stop-btn');
const currentTime = document.getElementById('current-time');
const duration = document.getElementById('duration');
const sourceType = document.getElementById('source-type');
// Multi-camera UI (toggle button + device dropdown).
const cameraToggleBtn = document.getElementById('camera-toggle-btn');
const cameraSelector = document.getElementById('camera-selector');
const cameraSelect = document.getElementById('camera-select');
// AI Detection Elements
const aiProbabilityValue = document.getElementById('ai-probability-value');
const aiProbabilityMeter = document.getElementById('ai-probability-meter');
const aiToolsResults = document.getElementById('ai-tools-results');
const aiToolCards = document.querySelectorAll('.ai-tool-card');
const aiDetectionIndicators = document.getElementById('ai-detection-indicators');
const aiMetadata = document.getElementById('ai-metadata');
// State
// Mutable application state shared by the analysis, recording and
// source-switching helpers below.
let stream = null;
let isDetecting = false;
let isRecording = false;
let detectionInterval = null;
let lastDetectionTime = 0;
// FPS bookkeeping: frames counted since lastFpsUpdate.
let frameCount = 0;
let lastFpsUpdate = 0;
let fps = 0;
// MediaRecorder pipeline: chunks accumulate until recording stops.
let mediaRecorder = null;
let recordedChunks = [];
let recordingStartTime = 0;
let recordingTimer = null;
let canvasStream = null;
let recordedBlob = null;
let animationId = null;
let videoSource = null; // 'camera' or 'upload'
let videoFile = null;
// Device list from enumerateDevices() and the id of the active camera.
let availableCameras = [];
let currentCameraId = null;
// Emotion tracking
// Per-emotion sample buffers; keys mirror the `name` fields of the
// emotions array defined below.
const emotionHistory = {
happy: [],
sad: [],
angry: [],
fearful: [],
surprised: [],
neutral: []
};
// Emotion labels and colors
// One entry per tracked expression. `name` matches both the face-api.js
// expression key and the emotionHistory buckets; `color` is the Tailwind
// class for the value text and `meterColor` the class for the meter fill.
// `sensitivity` (fear only) presumably scales the raw score before
// display - TODO confirm at the usage site (not visible in this chunk).
const emotions = [
  // Fixed: 'happy' previously used 'from-green-400', a gradient-stop class
  // that does nothing on its own, while every sibling uses a solid
  // 'bg-*-500' fill class; normalized to 'bg-green-500' for consistency.
  { name: 'happy', label: '😊', color: 'text-green-400', meterColor: 'bg-green-500' },
  { name: 'sad', label: '😢', color: 'text-blue-400', meterColor: 'bg-blue-500' },
  { name: 'angry', label: '😠', color: 'text-red-400', meterColor: 'bg-red-500' },
  { name: 'fearful', label: '😨', color: 'text-purple-400', meterColor: 'bg-purple-500', sensitivity: 1.5 },
  { name: 'surprised', label: '😲', color: 'text-yellow-400', meterColor: 'bg-yellow-500' },
  { name: 'neutral', label: '😐', color: 'text-gray-400', meterColor: 'bg-gray-500' }
];
// AI Tools database
// Drives the "Possible AI Tools" cards: display name, emoji icon, accent
// color (matches the inline border/meter colors in the markup), Tailwind
// text color class for the percentage label, and the three indicator
// strings shown when the tool is suspected.
const aiTools = [
  { name: 'Kling AI v2', icon: '💎', color: '#3b82f6', textColor: 'text-blue-400', indicators: ['High consistency', 'Cinematic quality', 'Chinese localization'] },
  { name: 'Runway Gen-3', icon: '🎬', color: '#8b5cf6', textColor: 'text-purple-400', indicators: ['Advanced motion control', 'Director mode sequences', 'Multi-shot consistency'] },
  { name: 'Sora v2', icon: '🎥', color: '#f59e0b', textColor: 'text-yellow-400', indicators: ['Photorealistic details', 'Physics accuracy', 'Long context (60s+)'] },
  { name: 'Luma Dream Machine', icon: '🌈', color: '#10b981', textColor: 'text-green-400', indicators: ['Text-to-3D capability', 'High frame rate', 'Neural rendering'] },
  { name: 'Pika 1.5', icon: '🎨', color: '#ec4899', textColor: 'text-pink-400', indicators: ['Artistic styles', 'Camera controls', 'Community models'] },
  // Fixed: key was misspelled 'textText', leaving this entry without the
  // textColor property every consumer of its siblings relies on.
  { name: 'Stable Video Diffusion 2.0', icon: '🔄', color: '#ef4444', textColor: 'text-red-400', indicators: ['Open weights', 'Multi-view synthesis', 'Fine-tuned control'] },
  { name: 'Hailuo v3', icon: '🌊', color: '#06b6d4', textColor: 'text-cyan-400', indicators: ['Water simulation', 'Weather effects', 'Atmospheric rendering'] },
  { name: 'Google Veo', icon: '💫', color: '#8b5cf6', textColor: 'text-purple-400', indicators: ['4K resolution', 'Video editing integration', 'Storyboard mode'] },
  { name: 'Wan Server v2', icon: '🌌', color: '#d946ef', textColor: 'text-fuchsia-400', indicators: ['Anime style', 'Character consistency', 'Batch generation'] }
];
// AI Detection indicators
// Human-readable heuristic labels for the "Detection Indicators" panel.
// NOTE(review): how and when entries are chosen is not visible in this
// chunk -- presumably a subset is shown once detection fires; confirm at
// the usage site.
const aiIndicators = [
'Unnatural eye blinking patterns',
'Lack of micro-expressions',
'Perfect symmetry in facial features',
'Inconsistent lighting/shadows',
'Artifacts around hair edges',
'Repetitive movement patterns',
'Unnatural teeth rendering',
'Smooth but unrealistic skin texture',
'Limited facial expression range',
'Metadata anomalies'
];
// Initialize face-api.js models
// Downloads the three networks used by the analysis loop (tiny face
// detector, 68-point landmarks, expression classifier) from the
// face-api.js demo CDN, then reveals the source-selection UI. On failure
// the loading overlay is replaced by a retry prompt.
async function loadModels() {
  try {
    statusText.textContent = 'Loading models...';
    loadingIndicator.classList.remove('hidden');
    // The three downloads are independent, so fetch them in parallel;
    // the previous sequential awaits tripled worst-case startup latency.
    const MODEL_URL = 'https://justadudewhohacks.github.io/face-api.js/models';
    await Promise.all([
      faceapi.nets.tinyFaceDetector.loadFromUri(MODEL_URL),
      faceapi.nets.faceLandmark68Net.loadFromUri(MODEL_URL),
      faceapi.nets.faceExpressionNet.loadFromUri(MODEL_URL)
    ]);
    loadingIndicator.classList.add('hidden');
    statusText.textContent = 'Models loaded. Ready to start!';
    statusText.className = 'text-green-400';
    startBtn.disabled = false;
    // Show source selection
    showSourceSelection();
  } catch (error) {
    console.error('Error loading models:', error);
    statusText.textContent = 'Error loading models. Please refresh.';
    statusText.className = 'text-red-400';
    loadingIndicator.innerHTML = `
<div class="text-center">
<i class="fas fa-exclamation-triangle text-red-500 text-4xl mb-2"></i>
<p class="text-white">Failed to load models. Please check your connection.</p>
<button onclick="location.reload()" class="mt-3 px-4 py-2 bg-blue-600 text-white rounded-lg">
<i class="fas fa-sync-alt mr-2"></i> Refresh Page
</button>
</div>
`;
  }
}
// Show source selection (camera or upload)
// Resets the stage to its "pick a source" state: the upload placeholder
// becomes visible while the live view, playback controls and camera
// toggle are hidden, and the info panel's source label is cleared.
function showSourceSelection() {
  uploadPlaceholder.classList.remove('hidden');
  [cameraView, videoControls, cameraToggleBtn].forEach((el) => {
    el.classList.add('hidden');
  });
  sourceType.textContent = 'None';
}
// Get available cameras
// Enumerates video-input devices, caches them in `availableCameras`, and
// rebuilds the camera <select> (placeholder option + one option per
// device). Returns the device array, or [] if enumeration fails.
async function getAvailableCameras() {
  try {
    const devices = await navigator.mediaDevices.enumerateDevices();
    availableCameras = devices.filter(device => device.kind === 'videoinput');
    // Rebuild the dropdown with DOM nodes rather than `innerHTML +=`:
    // string concatenation re-parsed the whole list on every iteration
    // and interpolated device labels into markup unescaped (HTML
    // injection if a label ever contains markup).
    cameraSelect.innerHTML = '';
    const placeholder = document.createElement('option');
    placeholder.value = '';
    placeholder.textContent = 'Select Camera';
    cameraSelect.appendChild(placeholder);
    availableCameras.forEach((camera, index) => {
      const option = document.createElement('option');
      option.value = camera.deviceId;
      // Labels are empty until camera permission has been granted.
      option.textContent = camera.label || `Camera ${index + 1}`;
      cameraSelect.appendChild(option);
    });
    return availableCameras;
  } catch (error) {
    console.error('Error enumerating cameras:', error);
    return [];
  }
}
// Start camera with specific device
// Opens a getUserMedia stream (optionally pinned to `deviceId`), wires it
// into the <video> element, waits for the stream's dimensions, and sizes
// the recording canvas to match. Records which physical camera is active
// and falls back to the source-selection UI on permission/device errors.
async function startCamera(deviceId = null) {
  try {
    statusText.textContent = 'Starting camera...';
    statusText.className = 'text-yellow-400';
    // Stop any existing stream so the device is released before reopening.
    if (stream) {
      stream.getTracks().forEach(track => track.stop());
    }
    // Prefer the user-facing camera unless a specific device was
    // requested; `undefined` entries are ignored by getUserMedia.
    const constraints = {
      video: {
        width: { ideal: 640 },
        height: { ideal: 480 },
        facingMode: deviceId ? undefined : 'user',
        deviceId: deviceId ? { exact: deviceId } : undefined
      },
      audio: false
    };
    stream = await navigator.mediaDevices.getUserMedia(constraints);
    // Store current camera ID
    if (deviceId) {
      currentCameraId = deviceId;
    } else {
      // Ask the track which device the browser actually selected.
      const videoTrack = stream.getVideoTracks()[0];
      currentCameraId = videoTrack.getSettings().deviceId;
    }
    cameraView.srcObject = stream;
    cameraView.classList.remove('hidden');
    uploadPlaceholder.classList.add('hidden');
    videoControls.classList.add('hidden');
    // Show camera toggle button if multiple cameras available
    if (availableCameras.length > 1) {
      cameraToggleBtn.classList.remove('hidden');
    }
    // Wait until the video's dimensions are known. Bug fix: if metadata
    // is already loaded (readyState >= HAVE_METADATA), 'loadedmetadata'
    // will not fire again and the original promise hung forever.
    await new Promise((resolve) => {
      if (cameraView.readyState >= 1) {
        resolve();
      } else {
        cameraView.onloadedmetadata = resolve;
      }
    });
    // Set canvas dimensions to match video
    recordingCanvas.width = cameraView.videoWidth;
    recordingCanvas.height = cameraView.videoHeight;
    videoSource = 'camera';
    sourceType.textContent = 'Camera';
  } catch (error) {
    console.error('Camera error:', error);
    statusText.textContent = 'Camera access denied. Please enable permissions.';
    statusText.className = 'text-red-400';
    showSourceSelection();
  }
}
// Swap the live feed to another camera device, pausing emotion detection
// for the duration of the switch and resuming it afterwards if it was on.
async function switchCamera(deviceId) {
  if (!deviceId) return;
  try {
    const resumeAfterSwitch = isDetecting;
    // Suspend analysis while the stream is torn down and rebuilt.
    if (resumeAfterSwitch) {
      stopDetection();
    }
    await startCamera(deviceId);
    if (resumeAfterSwitch) {
      startDetection();
    }
    // Dismiss the camera-selector popover.
    cameraSelector.classList.remove('active');
  } catch (error) {
    console.error('Error switching camera:', error);
    statusText.textContent = 'Error switching camera.';
    statusText.className = 'text-red-400';
  }
}
// When the user picks a file, remember it and load it into the player.
videoUpload.addEventListener('change', function(e) {
  const files = e.target.files;
  if (!files || files.length === 0) return;
  videoFile = files[0];
  loadVideoFile(videoFile);
});
// Load an uploaded video file into the shared <video> element and switch
// the app into "upload" mode (shows playback controls, hides camera UI).
// file: a File object from the upload input.
function loadVideoFile(file) {
  // FIX: release the previous upload's blob URL before replacing it.
  // Without this, every successive upload leaked an object URL until
  // page unload (only the last one was revoked in 'beforeunload').
  if (videoSource === 'upload' && cameraView.src) {
    URL.revokeObjectURL(cameraView.src);
  }
  const videoURL = URL.createObjectURL(file);
  cameraView.src = videoURL;
  cameraView.srcObject = null;
  cameraView.classList.remove('hidden');
  uploadPlaceholder.classList.add('hidden');
  cameraToggleBtn.classList.add('hidden');
  // Once dimensions are known, size the recording canvas and show controls.
  cameraView.onloadedmetadata = function() {
    recordingCanvas.width = cameraView.videoWidth;
    recordingCanvas.height = cameraView.videoHeight;
    videoControls.classList.remove('hidden');
    updateVideoTimeDisplay();
    // Keep the current/total time readout in sync during playback.
    cameraView.ontimeupdate = updateVideoTimeDisplay;
  };
  videoSource = 'upload';
  sourceType.textContent = 'Uploaded Video';
  statusText.textContent = 'Video loaded. Ready to start detection!';
  statusText.className = 'text-green-400';
}
// Refresh the current/total time readouts for the loaded video.
// `duration` is NaN before metadata loads, hence the `|| 0` fallback.
function updateVideoTimeDisplay() {
  currentTime.textContent = formatTime(cameraView.currentTime);
  duration.textContent = formatTime(cameraView.duration || 0);
}
// Format a duration in seconds as M:SS (e.g. 65 -> "1:05").
// FIX: non-finite or negative input (e.g. NaN duration before video
// metadata loads) previously rendered "NaN:NaN"; it is now clamped to 0.
function formatTime(seconds) {
  if (!Number.isFinite(seconds) || seconds < 0) {
    seconds = 0;
  }
  const mins = Math.floor(seconds / 60);
  const secs = Math.floor(seconds % 60);
  return `${mins}:${secs.toString().padStart(2, '0')}`;
}
// Play: resume playback and, if analysis is active, restart the loop.
playBtn.addEventListener('click', () => {
  cameraView.play();
  if (isDetecting && !detectionInterval) {
    detectionInterval = setInterval(detectEmotions, 300);
  }
});
// Pause: halt playback and suspend the detection loop.
pauseBtn.addEventListener('click', () => {
  cameraView.pause();
  if (detectionInterval) {
    clearInterval(detectionInterval);
    detectionInterval = null;
  }
});
// Stop: end the analysis session; rewind uploaded videos to the start.
videoStopBtn.addEventListener('click', () => {
  stopDetection();
  if (videoSource === 'upload') {
    cameraView.pause();
    cameraView.currentTime = 0;
    currentTime.textContent = '0:00';
  }
});
// Go back to the source-selection screen.
switchSourceBtn.addEventListener('click', () => {
  stopDetection();
  showSourceSelection();
});
// "Use camera": refresh the device list, then open the default camera.
useCameraBtn.addEventListener('click', async () => {
  await getAvailableCameras();
  startCamera();
});
// Toggle the camera-selector popover. stopPropagation keeps the
// document-level click handler below from closing it immediately.
cameraToggleBtn.addEventListener('click', (e) => {
  e.stopPropagation();
  cameraSelector.classList.toggle('active');
});
// Any click outside the selector dismisses it.
document.addEventListener('click', (e) => {
  if (!cameraSelector.contains(e.target) && e.target !== cameraToggleBtn) {
    cameraSelector.classList.remove('active');
  }
});
// Switch cameras when a device is picked from the dropdown.
// (Kept as a classic function: the handler reads `this.value`.)
cameraSelect.addEventListener('change', function() {
  if (this.value) {
    switchCamera(this.value);
  }
});
// Begin the emotion-analysis session for the currently selected source.
// Refuses to start when no source has been chosen yet.
async function startDetection() {
  if (!videoSource) {
    statusText.textContent = 'Please select a video source first';
    statusText.className = 'text-red-400';
    return;
  }
  // Uploaded clips always restart from the beginning.
  if (videoSource === 'upload') {
    cameraView.currentTime = 0;
    cameraView.play();
  }
  isDetecting = true;
  // Flip the controls into their "running" configuration.
  startBtn.classList.add('hidden');
  [recordBtn, stopBtn, analysisOverlay].forEach(el => el.classList.remove('hidden'));
  statusText.textContent = 'Analyzing video...';
  statusText.className = 'text-green-400';
  // Reset FPS bookkeeping before the loop starts.
  lastFpsUpdate = Date.now();
  frameCount = 0;
  // A 300ms cadence balances responsiveness against CPU load.
  detectionInterval = setInterval(detectEmotions, 300);
}
// Start recording the annotated output via MediaRecorder.
// Captures `recordingCanvas` (where drawToCanvas composites the video
// frame plus overlays) at 30 FPS, collecting WebM chunks into
// `recordedChunks`. Also starts the elapsed-time ticker and the
// drawToCanvas animation loop.
// NOTE(review): the guard requires `stream`, which only camera mode sets —
// recording appears to be camera-only; confirm uploads are excluded on purpose.
function startRecording() {
if (!stream || isRecording) return;
try {
recordedChunks = [];
// Preferred container/codec; not every browser supports vp9.
const options = { mimeType: 'video/webm;codecs=vp9' };
try {
// Create a stream from our canvas
canvasStream = recordingCanvas.captureStream(30); // 30 FPS
mediaRecorder = new MediaRecorder(canvasStream, options);
} catch (e) {
console.warn('Unable to create MediaRecorder with preferred options:', e);
// Fallback to default options
mediaRecorder = new MediaRecorder(canvasStream);
}
// Accumulate encoded chunks as the recorder emits them.
mediaRecorder.ondataavailable = function(event) {
if (event.data.size > 0) {
recordedChunks.push(event.data);
}
};
// On stop: assemble the final blob, halt compositing, then drive a
// purely cosmetic "processing" progress bar before revealing download UI.
mediaRecorder.onstop = async function() {
// Show uploading status
recordingStatus.classList.add('hidden');
uploadStatus.classList.remove('hidden');
// Create blob from recorded chunks
recordedBlob = new Blob(recordedChunks, { type: 'video/webm' });
// Stop the canvas animation
cancelAnimationFrame(animationId);
// Simulated progress: +5% every 100ms (~2s total); no real upload
// or processing happens here.
let progress = 0;
const progressInterval = setInterval(() => {
progress += 5;
if (progress > 100) progress = 100;
uploadProgressBar.style.width = `${progress}%`;
if (progress === 100) {
clearInterval(progressInterval);
// After processing completes
setTimeout(() => {
uploadStatus.classList.add('hidden');
downloadSection.classList.remove('hidden');
}, 500);
}
}, 100);
};
// Start recording
mediaRecorder.start(100); // Collect data every 100ms
isRecording = true;
// Flip the record button into its "stop" appearance.
recordBtn.innerHTML = '<i class="fas fa-stop mr-2"></i> Stop Recording';
recordBtn.classList.remove('bg-red-600');
recordBtn.classList.add('bg-gray-600');
recordingStatus.classList.remove('hidden');
// Start recording timer
recordingStartTime = Date.now();
updateRecordingTime();
recordingTimer = setInterval(updateRecordingTime, 1000);
// Start drawing frames to canvas for recording
drawToCanvas();
} catch (error) {
console.error('Recording error:', error);
statusText.textContent = 'Error starting recording.';
statusText.className = 'text-red-400';
}
}
// Composite the current video frame plus every detection overlay onto
// `recordingCanvas`. MediaRecorder captures this canvas, so whatever is
// drawn here is what ends up in the saved recording. Re-schedules itself
// via requestAnimationFrame until `isRecording` goes false.
function drawToCanvas() {
  if (!isRecording) return;
  recordingCtx.clearRect(0, 0, recordingCanvas.width, recordingCanvas.height);
  // Draw the video frame. The live camera is mirrored horizontally so the
  // recording matches the on-screen preview; uploads are drawn as-is.
  recordingCtx.save();
  if (videoSource === 'camera') {
    recordingCtx.scale(-1, 1);
    recordingCtx.drawImage(cameraView, -recordingCanvas.width, 0, recordingCanvas.width, recordingCanvas.height);
  } else {
    // FIX: was `recordingCctx` (typo) — an undeclared identifier that threw
    // a ReferenceError and aborted every frame for uploaded videos.
    recordingCtx.drawImage(cameraView, 0, 0, recordingCanvas.width, recordingCanvas.height);
  }
  recordingCtx.restore();
  // Mirror the DOM face boxes / labels onto the canvas, using their
  // computed styles as the source of truth for position and color.
  const faceBoxes = emotionOverlay.querySelectorAll('.face-box');
  const emotionLabels = emotionOverlay.querySelectorAll('.emotion-label');
  faceBoxes.forEach(box => {
    const style = window.getComputedStyle(box);
    recordingCtx.strokeStyle = style.borderColor;
    // FIX: was `recordingCctx.lineWidth` (same typo) — crashed the frame
    // loop as soon as a face box existed.
    recordingCtx.lineWidth = parseInt(style.borderWidth);
    recordingCtx.strokeRect(
      parseInt(style.left),
      parseInt(style.top),
      parseInt(style.width),
      parseInt(style.height)
    );
    recordingCtx.fillStyle = style.backgroundColor;
    recordingCtx.globalAlpha = 0.2;
    recordingCtx.fillRect(
      parseInt(style.left),
      parseInt(style.top),
      parseInt(style.width),
      parseInt(style.height)
    );
    recordingCtx.globalAlpha = 1.0;
  });
  emotionLabels.forEach(label => {
    const style = window.getComputedStyle(label);
    recordingCtx.fillStyle = style.backgroundColor;
    recordingCtx.fillRect(
      parseInt(style.left),
      parseInt(style.top),
      label.offsetWidth,
      label.offsetHeight
    );
    recordingCtx.font = `${style.fontSize} ${style.fontFamily}`;
    recordingCtx.fillStyle = style.color;
    recordingCtx.fillText(
      label.textContent,
      parseInt(style.left) + 3,
      parseInt(style.top) + parseInt(style.fontSize)
    );
  });
  // Draw the compact "Emotion Analysis" panel (105px wide) in the
  // bottom-left corner, mirroring the on-screen overlay values.
  if (!analysisOverlay.classList.contains('hidden')) {
    recordingCtx.fillStyle = 'rgba(0, 0, 0, 0.7)';
    recordingCtx.fillRect(10, recordingCanvas.height - 110, 105, 100);
    recordingCtx.font = 'bold 10px Arial';
    recordingCtx.fillStyle = 'white';
    recordingCtx.fillText('Emotion Analysis', 15, recordingCanvas.height - 95);
    const analysisItems = analysisEmotionsContainer.querySelectorAll('.analysis-emotion');
    analysisItems.forEach((item, index) => {
      const name = item.querySelector('.analysis-emotion-name').textContent;
      const value = item.querySelector('.analysis-emotion-value').textContent;
      const fill = item.querySelector('.analysis-emotion-fill');
      // Scale the percentage width into the 84px meter track.
      const width = parseFloat(fill.style.width) / 100 * 84;
      recordingCtx.font = '10px Arial';
      recordingCtx.fillStyle = 'white';
      recordingCtx.fillText(name, 15, recordingCanvas.height - 80 + (index * 15));
      recordingCtx.fillText(value, 40, recordingCanvas.height - 80 + (index * 15));
      // Meter background then fill.
      recordingCtx.fillStyle = 'rgba(255, 255, 255, 0.2)';
      recordingCtx.fillRect(15, recordingCanvas.height - 75 + (index * 15), 84, 3);
      recordingCtx.fillStyle = 'white';
      recordingCtx.fillRect(15, recordingCanvas.height - 75 + (index * 15), width, 3);
    });
  }
  // Schedule the next frame.
  animationId = requestAnimationFrame(drawToCanvas);
}
// Finish the active recording, restore the record button to its idle
// appearance, and tear down the timer and the canvas capture stream.
function stopRecording() {
  if (!isRecording || !mediaRecorder) return;
  mediaRecorder.stop();
  isRecording = false;
  // Revert the button to its idle "Record" look.
  recordBtn.innerHTML = '<i class="fas fa-circle mr-2"></i> Record';
  const btnClasses = recordBtn.classList;
  btnClasses.remove('bg-gray-600');
  btnClasses.add('bg-red-600');
  recordingStatus.classList.add('hidden');
  // Halt the elapsed-time ticker.
  clearInterval(recordingTimer);
  recordingTimer = null;
  // Release the canvas capture stream, if one exists.
  if (canvasStream) {
    canvasStream.getTracks().forEach(track => track.stop());
    canvasStream = null;
  }
}
// Render the elapsed recording time (since recordingStartTime) as M:SS.
function updateRecordingTime() {
  const elapsedSeconds = Math.floor((Date.now() - recordingStartTime) / 1000);
  const mins = Math.floor(elapsedSeconds / 60);
  const secs = String(elapsedSeconds % 60).padStart(2, '0');
  recordingTime.textContent = `${mins}:${secs}`;
}
// Offer the recorded blob as a file download.
// (The container is WebM — MediaRecorder produced video/webm — so the
// file gets a .webm extension despite earlier "MP4" wording.)
function downloadVideo() {
  if (!recordedBlob) {
    alert('No recording available to download.');
    return;
  }
  const url = URL.createObjectURL(recordedBlob);
  const anchor = document.createElement('a');
  anchor.style.display = 'none';
  anchor.href = url;
  // Timestamped filename; ':' and '.' are made filesystem-safe.
  const timestamp = new Date().toISOString().replace(/[:.]/g, '-');
  anchor.download = `emotion-detection-${timestamp}.webm`;
  document.body.appendChild(anchor);
  anchor.click();
  // Defer cleanup so the click has time to start the download.
  setTimeout(() => {
    document.body.removeChild(anchor);
    window.URL.revokeObjectURL(url);
  }, 100);
}
// Stop the analysis session entirely: ends any active recording, releases
// the camera stream, cancels timers and the compositing animation, and
// resets every stats display, emotion card and the AI-detection panel.
function stopDetection() {
// Stop recording if active
if (isRecording) {
stopRecording();
}
// Release camera hardware by stopping all tracks.
if (stream) {
stream.getTracks().forEach(track => track.stop());
stream = null;
}
if (detectionInterval) {
clearInterval(detectionInterval);
detectionInterval = null;
}
if (animationId) {
cancelAnimationFrame(animationId);
animationId = null;
}
// Pause video if it's playing
if (videoSource === 'upload') {
cameraView.pause();
}
isDetecting = false;
// Restore the idle button layout and hide progress/overlay sections.
startBtn.classList.remove('hidden');
recordBtn.classList.add('hidden');
stopBtn.classList.add('hidden');
recordingStatus.classList.add('hidden');
uploadStatus.classList.add('hidden');
downloadSection.classList.add('hidden');
analysisOverlay.classList.add('hidden');
statusText.textContent = 'Detection stopped. Ready to start again.';
statusText.className = 'text-yellow-400';
// Clear overlay and zero all counters.
emotionOverlay.innerHTML = '';
faceCount.textContent = '0';
detectionSpeed.textContent = '0';
fpsCounter.textContent = '0';
recordingTime.textContent = '0:00';
uploadProgressBar.style.width = '0%';
// Reset emotion meters and history
emotions.forEach((emotion, index) => {
const card = emotionCards[index];
// NOTE(review): this selector ('.text-right span:first-child') differs
// from the 'span:last-child' used in detectEmotions — confirm both hit
// the same percent element in the card markup.
const percentSpan = card.querySelector('.text-right span:first-child');
emotionHistory[emotion.name] = [];
const meterFill = card.querySelector('.meter-fill');
percentSpan.textContent = '0%';
percentSpan.className = emotion.color;
meterFill.style.width = '0%';
card.classList.remove('active');
card.classList.remove('fear-high');
});
// Reset AI detection
resetAIDetection();
}
// Return the AI-detection panel to its pristine (0%) state:
// global meter, every per-tool card, highlights and detail sections.
function resetAIDetection() {
  aiProbabilityValue.textContent = '0%';
  aiProbabilityMeter.style.width = '0%';
  aiProbabilityValue.className = 'text-blue-400';
  // Zero out each tool card (index 0 in aiToolCards is the global card).
  aiTools.forEach((tool, index) => {
    const card = aiToolCards[index + 1];
    const percentLabel = card.querySelector('span:last-child');
    const fill = card.querySelector('.meter-fill');
    percentLabel.textContent = '0%';
    percentLabel.className = tool.textColor;
    fill.style.width = '0%';
    fill.style.backgroundColor = tool.color;
    card.classList.remove('active');
  });
  // Drop the global highlight and hide the detail sections.
  aiToolCards[0].classList.remove('ai-high-probability');
  aiDetectionIndicators.classList.add('hidden');
  aiMetadata.classList.add('hidden');
}
// Rebuild the on-camera "Emotion Analysis" panel from the latest
// per-emotion probabilities (`expressions` maps emotion name -> 0..1).
// Each emotion gets a name, a percent value, and a colored meter bar.
function updateAnalysisOverlay(expressions) {
analysisEmotionsContainer.innerHTML = '';
emotions.forEach(emotion => {
let probability = expressions[emotion.name] || 0;
// Apply sensitivity multiplier for fear
if (emotion.name === 'fearful' && emotion.sensitivity) {
probability = Math.min(1, probability * emotion.sensitivity);
}
const percent = Math.round(probability * 100);
const emotionElement = document.createElement('div');
emotionElement.className = 'analysis-emotion';
const nameElement = document.createElement('div');
nameElement.className = 'analysis-emotion-name';
nameElement.textContent = emotion.label;
const valueElement = document.createElement('div');
valueElement.className = 'analysis-emotion-value';
valueElement.textContent = `${percent}%`;
const meterContainer = document.createElement('div');
meterContainer.className = 'analysis-emotion-meter';
const meterFill = document.createElement('div');
meterFill.className = 'analysis-emotion-fill';
meterFill.style.width = `${percent}%`;
// NOTE(review): strips 'bg-' and '-500' from a Tailwind class to derive a
// CSS color keyword (e.g. 'bg-blue-500' -> 'blue'). This only works when
// meterColor has exactly that shape; detectEmotions replaces '400' in the
// same field — confirm the actual format of emotion.meterColor.
meterFill.style.backgroundColor = emotion.meterColor.replace('bg-', '').replace('-500', '');
meterContainer.appendChild(meterFill);
emotionElement.appendChild(nameElement);
emotionElement.appendChild(valueElement);
emotionElement.appendChild(meterContainer);
analysisEmotionsContainer.appendChild(emotionElement);
});
}
// Heuristic (simulated) estimate of how likely the footage is AI-generated.
// NOT a real detector: combines a source-based prior, random jitter, and a
// few expression-pattern penalties, then hands the clamped score to the UI.
// faceData: face-api detection results array, or null when no faces found.
function analyzeAIGeneration(faceData) {
  // Prior: live camera footage is assumed far less likely to be synthetic.
  let aiProbability = (videoSource === 'camera') ? 0.05 : 0.25;
  // Jitter in [-0.15, +0.15) simulates analysis noise.
  aiProbability += Math.random() * 0.3 - 0.15;
  if (faceData && faceData.length > 0) {
    const { expressions } = faceData[0];
    // Expression patterns treated as synthetic "tells".
    if (expressions.neutral > 0.9) aiProbability += 0.2;
    if (expressions.happy > 0.8 && expressions.happy - expressions.neutral > 0.6) aiProbability += 0.15;
    if (expressions.fearful > 0.7) aiProbability += 0.1;
    // A contradictory happy+angry combination is penalized hardest.
    if (expressions.happy > 0.7 && expressions.angry > 0.5) aiProbability += 0.25;
  }
  // Clamp to [0, 1] before updating the UI.
  aiProbability = Math.max(0, Math.min(1, aiProbability));
  updateAIDetectionUI(aiProbability);
}
// Reflect the estimated AI-generation probability (0..1) in the UI:
// global meter, severity color, per-tool breakdown and detail sections.
function updateAIDetectionUI(probability) {
  const percent = Math.round(probability * 100);
  aiProbabilityValue.textContent = `${percent}%`;
  aiProbabilityMeter.style.width = `${percent}%`;
  const meterWrap = aiProbabilityMeter.parentElement;
  const globalCard = aiToolCards[0];
  // Severity color-coding: >70% red (with highlight), >40% yellow, else blue.
  if (probability > 0.7) {
    aiProbabilityValue.className = 'text-red-400';
    meterWrap.classList.add('ai-detection-high');
    globalCard.classList.add('ai-high-probability');
  } else {
    aiProbabilityValue.className = probability > 0.4 ? 'text-yellow-400' : 'text-blue-400';
    meterWrap.classList.remove('ai-detection-high');
    globalCard.classList.remove('ai-high-probability');
  }
  updateAIToolsProbabilities(probability);
  // Detail sections only appear above the 30% threshold.
  if (probability > 0.3) {
    updateAIIndicators(probability);
    aiDetectionIndicators.classList.remove('hidden');
    aiMetadata.classList.remove('hidden');
  } else {
    aiDetectionIndicators.classList.add('hidden');
    aiMetadata.classList.add('hidden');
  }
}
// Distribute the global AI probability across the individual tool cards,
// ranking them and highlighting the top two candidates.
function updateAIToolsProbabilities(globalProbability) {
  // Per-tool score: scaled global value plus noise, clamped to [0, 1].
  const toolProbabilities = aiTools.map(() => {
    let prob = globalProbability * (0.7 + Math.random() * 0.6);
    prob += Math.random() * 0.3 - 0.15;
    return Math.max(0, Math.min(1, prob));
  });
  // Tool indices ordered from most to least probable.
  const sortedIndices = aiTools
    .map((_, i) => i)
    .sort((a, b) => toolProbabilities[b] - toolProbabilities[a]);
  sortedIndices.forEach((toolIndex, displayIndex) => {
    const tool = aiTools[toolIndex];
    const card = aiToolCards[toolIndex + 1]; // slot 0 is the global card
    const percent = Math.round(toolProbabilities[toolIndex] * 100);
    const percentLabel = card.querySelector('span:last-child');
    const fill = card.querySelector('.meter-fill');
    percentLabel.textContent = `${percent}%`;
    percentLabel.className = tool.textColor;
    fill.style.width = `${percent}%`;
    fill.style.backgroundColor = tool.color;
    // Emphasize the two most likely tools, but only above 30%.
    const highlight = displayIndex < 2 && toolProbabilities[toolIndex] > 0.3;
    card.classList.toggle('active', highlight);
  });
}
// Populate the "Detection Indicators" panel with 3-5 randomly chosen
// indicator strings, and (when probability > 0.5) show one simulated
// metadata "finding". Purely cosmetic — nothing is actually analyzed.
function updateAIIndicators(probability) {
// Random-comparator sort is a rough (non-uniform) shuffle — adequate
// for picking a few display strings.
const shuffledIndicators = [...aiIndicators].sort(() => 0.5 - Math.random());
const selectedIndicators = shuffledIndicators.slice(0, 3 + Math.floor(Math.random() * 3));
aiDetectionIndicators.innerHTML = `
<h3 class="font-medium text-white mb-2">Detection Indicators:</h3>
<div class="text-xs space-y-1">
${selectedIndicators.map(indicator => `
<div class="flex items-start">
<span class="text-xs mr-2">•</span>
<span>${indicator}</span>
</div>
`).join('')}
</div>
`;
// Above 50%: display one randomly picked fake metadata signature.
if (probability > 0.5) {
const metadataTypes = [
'AI_ENGINE=RunwayML',
'GENERATION_PARAMS={"seed": 4294967295}',
'CREATOR_TOOL=KlingAI_v1.2.3',
'SOFTWARE=Hailuo Video Generator',
'MODEL_VERSION=StableVideo_1.0'
];
const selectedMetadata = metadataTypes[Math.floor(Math.random() * metadataTypes.length)];
aiMetadata.innerHTML = `
<h3 class="font-medium text-white mb-2">Metadata Analysis:</h3>
<div class="text-xs">
<p class="ai-signature">${selectedMetadata}</p>
</div>
`;
} else {
aiMetadata.innerHTML = `
<h3 class="font-medium text-white mb-2">Metadata Analysis:</h3>
<div class="text-xs">
<p class="ai-signature">No metadata detected</p>
</div>
`;
}
}
// One detection tick (run every 300ms by the detection interval):
// runs face-api face/landmark/expression detection on the current video
// frame, then updates the stats bar, the DOM overlay (boxes + labels),
// the analysis panel, the AI heuristic, and every emotion card.
async function detectEmotions() {
if (!isDetecting) return;
const startTime = Date.now();
// Counts frames toward the once-per-second FPS computation below.
frameCount++;
try {
// Detect faces and expressions
const options = new faceapi.TinyFaceDetectorOptions({
inputSize: 128, // Smaller size for faster detection
scoreThreshold: 0.5
});
const result = await faceapi.detectAllFaces(cameraView, options)
.withFaceLandmarks()
.withFaceExpressions();
const detectionTime = Date.now() - startTime;
detectionSpeed.textContent = detectionTime;
// Update FPS counter every second
const now = Date.now();
if (now - lastFpsUpdate >= 1000) {
fps = Math.round((frameCount * 1000) / (now - lastFpsUpdate));
fpsCounter.textContent = fps;
frameCount = 0;
lastFpsUpdate = now;
}
// Clear previous overlay
emotionOverlay.innerHTML = '';
// Update face count
faceCount.textContent = result.length;
// No faces: zero the cards, push empty values to the analysis panel,
// still run the AI heuristic, and bail out for this tick.
if (result.length === 0) {
// No faces detected
emotions.forEach((emotion, index) => {
const card = emotionCards[index];
const percentSpan = card.querySelector('span:last-child');
const meterFill = card.querySelector('.meter-fill');
percentSpan.textContent = '0%';
percentSpan.className = emotion.color;
meterFill.style.width = '0%';
card.classList.remove('active');
card.classList.remove('fear-high');
});
// Update analysis overlay with empty values
const emptyExpressions = {};
emotions.forEach(emotion => {
emptyExpressions[emotion.name] = 0;
});
updateAnalysisOverlay(emptyExpressions);
// Analyze for AI generation even without faces
analyzeAIGeneration(null);
return;
}
// For simplicity, we'll use the first detected face
const face = result[0];
const expressions = face.expressions;
// Update analysis overlay
updateAnalysisOverlay(expressions);
// Analyze for AI generation
analyzeAIGeneration(result);
// Draw face bounding box (absolutely positioned DOM element over video).
const box = face.detection.box;
const faceBox = document.createElement('div');
faceBox.className = 'face-box';
faceBox.style.left = `${box.x}px`;
faceBox.style.top = `${box.y}px`;
faceBox.style.width = `${box.width}px`;
faceBox.style.height = `${box.height}px`;
emotionOverlay.appendChild(faceBox);
// Label placed 20px above the box.
const label = document.createElement('div');
label.className = 'emotion-label';
label.style.left = `${box.x}px`;
label.style.top = `${box.y - 20}px`;
// Find dominant emotion (fear gets a fixed 1.5x boost here, capped at 1).
let dominantEmotion = 'neutral';
let maxProbability = 0;
for (const [emotion, probability] of Object.entries(expressions)) {
// Apply sensitivity multiplier for fear
const adjustedProbability = emotion === 'fearful' ?
Math.min(1, probability * 1.5) : probability;
if (adjustedProbability > maxProbability) {
maxProbability = adjustedProbability;
dominantEmotion = emotion;
}
}
label.textContent = `${dominantEmotion} (${Math.round(maxProbability * 100)}%)`;
emotionOverlay.appendChild(label);
// Update emotion meters
emotions.forEach((emotion, index) => {
let probability = expressions[emotion.name] || 0;
// Apply sensitivity multiplier for fear
if (emotion.name === 'fearful' && emotion.sensitivity) {
probability = Math.min(1, probability * emotion.sensitivity);
}
const percent = Math.round(probability * 100);
const card = emotionCards[index];
const percentSpan = card.querySelector('span:last-child');
const meterFill = card.querySelector('.meter-fill');
// Update emotion history
emotionHistory[emotion.name].push(percent);
// NOTE(review): this local `frameCount` (history length) shadows the
// module-level FPS `frameCount` used above — intentional but fragile.
const frameCount = emotionHistory[emotion.name].length;
const average = Math.round(emotionHistory[emotion.name].reduce((a, b) => a + b, 0) / frameCount);
// Update average display
const avgElement = card.querySelector('.avg-value');
const countElement = card.querySelector('.frame-count');
avgElement.textContent = `${average}%`;
countElement.textContent = frameCount.toLocaleString();
// Calculate change from last measurement (0 on the first sample).
const prevPercent = emotionHistory[emotion.name].length > 1 ?
emotionHistory[emotion.name][emotionHistory[emotion.name].length - 2] : percent;
const change = percent - prevPercent;
// Trend arrow: up/down only when the change exceeds 5 points.
const changeSpan = card.querySelector('.change-indicator');
const deltaSpan = card.querySelector('span:last-child span:last-child');
if (change > 5) {
changeSpan.textContent = '↑';
changeSpan.className = 'change-indicator change-up';
} else if (change < -5) {
changeSpan.textContent = '↓';
changeSpan.className = 'change-indicator change-down';
} else {
changeSpan.textContent = '→';
changeSpan.className = 'change-indicator change-neutral';
}
deltaSpan.textContent = `${Math.abs(change)}%`;
percentSpan.innerHTML = `${percent}%`;
percentSpan.className = `text-lg font-bold ${emotion.color}`;
meterFill.style.width = `${percent}%`;
// NOTE(review): assumes emotion.meterColor contains '400' (e.g. a
// 'from-*-400' Tailwind class); updateAnalysisOverlay strips '-500'
// from the same field — confirm the intended format.
meterFill.className = `meter-fill h-full bg-gradient-to-r ${emotion.meterColor} to-${emotion.meterColor.replace('400','600')} transition-all duration-300`;
// Highlight dominant emotion
if (emotion.name === dominantEmotion) {
card.classList.add('active');
// Special highlighting for high fear levels
if (emotion.name === 'fearful' && percent > 40) {
card.classList.add('fear-high');
} else {
card.classList.remove('fear-high');
}
} else {
card.classList.remove('active');
card.classList.remove('fear-high');
}
});
} catch (error) {
console.error('Detection error:', error);
statusText.textContent = 'Error during detection. Try again.';
statusText.className = 'text-red-400';
}
}
// Wire up the primary controls.
startBtn.addEventListener('click', startDetection);
// One button toggles between starting and stopping a recording.
recordBtn.addEventListener('click', () => {
  isRecording ? stopRecording() : startRecording();
});
stopBtn.addEventListener('click', stopDetection);
localDownloadBtn.addEventListener('click', downloadVideo);
// Kick off model loading as soon as the page is ready.
loadModels();
// Release camera hardware and blob URLs when the page closes.
window.addEventListener('beforeunload', () => {
  stopDetection();
  if (videoSource === 'upload' && cameraView.src) {
    URL.revokeObjectURL(cameraView.src);
  }
});
});
</script>
<!-- Font Awesome for icons -->
<script src="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.4.0/js/all.min.js"></script>
<p style="border-radius: 8px; text-align: center; font-size: 12px; color: #fff; margin-top: 16px;position: fixed; left: 8px; bottom: 8px; z-index: 10; background: rgba(0, 0, 0, 0.8); padding: 4px 8px;">Made with <img src="https://enzostvs-deepsite.hf.space/logo.svg" alt="DeepSite Logo" style="width: 16px; height: 16px; vertical-align: middle;display:inline-block;margin-right:3px;filter:brightness(0) invert(1);"><a href="https://enzostvs-deepsite.hf.space" style="color: #fff;text-decoration: underline;" target="_blank" >DeepSite</a> - 🧬 <a href="https://enzostvs-deepsite.hf.space?remix=Lauroscope/Picmotion" style="color: #fff;text-decoration: underline;" target="_blank" >Remix</a></p><p style="border-radius: 8px; text-align: center; font-size: 12px; color: #fff; margin-top: 16px;position: fixed; left: 8px; bottom: 8px; z-index: 10; background: rgba(0, 0, 0, 0.8); padding: 4px 8px;">Made with <img src="https://enzostvs-deepsite.hf.space/logo.svg" alt="DeepSite Logo" style="width: 16px; height: 16px; vertical-align: middle;display:inline-block;margin-right:3px;filter:brightness(0) invert(1);"><a href="https://enzostvs-deepsite.hf.space" style="color: #fff;text-decoration: underline;" target="_blank" >DeepSite</a> - 🧬 <a href="https://enzostvs-deepsite.hf.space?remix=Lauroscope/video-analysis" style="color: #fff;text-decoration: underline;" target="_blank" >Remix</a></p></body>
</html>