|
import streamlit as st |
|
import streamlit.components.v1 as components |
|
|
|
def audio_visualizer():
    """Render a microphone-driven audio visualizer inside a Streamlit page.

    Embeds an HTML5 canvas + Web Audio API component that draws live
    frequency bars (with emoji accents) from the user's microphone.
    All audio capture and drawing happens client-side in the browser;
    Streamlit only hosts the HTML via ``components.html``.

    Returns:
        None. Side effects: writes title/markdown/CSS to the Streamlit
        page and mounts the HTML component.
    """
    html_code = """
    <div style="background: #1a1a1a; padding: 20px; border-radius: 10px;">
        <button id="startButton" style="padding: 10px 20px; background: #4CAF50; color: white; border: none; border-radius: 4px; cursor: pointer; font-size: 20px;">
            🎵 Start
        </button>
        <canvas id="visualizer" style="width: 100%; height: 400px;"></canvas>

        <script>
        const canvas = document.getElementById('visualizer');
        const ctx = canvas.getContext('2d');
        const startButton = document.getElementById('startButton');
        const emojis = ['🎵', '🎶', '🎸', '🎹', '🎺', '🎷', '🥁', '🎻'];
        let audioContext, analyser, dataArray, source, stream, animationId;
        let isActive = false;

        // Keep the canvas drawing buffer in sync with its CSS layout size.
        function setupCanvas() {
            canvas.width = canvas.offsetWidth;
            canvas.height = canvas.offsetHeight;
        }

        // Lazily build the audio graph. This must happen inside a user
        // gesture (the button click): an AudioContext created at page load
        // is left 'suspended' by browser autoplay policies, and the
        // analyser would only ever report zeros.
        function ensureAudioGraph() {
            if (!audioContext) {
                audioContext = new (window.AudioContext || window.webkitAudioContext)();
                analyser = audioContext.createAnalyser();
                analyser.fftSize = 256;
                dataArray = new Uint8Array(analyser.frequencyBinCount);
            }
        }

        function draw() {
            if (!isActive) return;
            animationId = requestAnimationFrame(draw);
            analyser.getByteFrequencyData(dataArray);

            // Translucent fill gives a motion-trail effect between frames.
            ctx.fillStyle = 'rgba(26, 26, 26, 0.2)';
            ctx.fillRect(0, 0, canvas.width, canvas.height);

            // Draw one bar per frequency bin, plus an emoji every 16 bins.
            const barWidth = canvas.width / analyser.frequencyBinCount;
            dataArray.forEach((value, i) => {
                const percent = value / 255;
                const h = percent * canvas.height;
                const hue = i / analyser.frequencyBinCount * 360;

                ctx.fillStyle = `hsl(${hue}, 70%, 50%)`;
                ctx.fillRect(i * barWidth, canvas.height - h, barWidth - 1, h);

                if (i % 16 === 0) {
                    ctx.font = `${20 + percent * 20}px Arial`;
                    ctx.fillText(
                        emojis[i % emojis.length],
                        i * barWidth,
                        canvas.height - h - 30
                    );
                }
            });
        }

        async function toggleAudio() {
            if (!isActive) {
                try {
                    ensureAudioGraph();
                    // Resume in case the context was auto-suspended
                    // (autoplay policy, or the tab was backgrounded).
                    await audioContext.resume();
                    stream = await navigator.mediaDevices.getUserMedia({ audio: true });
                    source = audioContext.createMediaStreamSource(stream);
                    source.connect(analyser);
                    isActive = true;
                    startButton.textContent = '🎵 Stop';
                    draw();
                } catch (err) {
                    console.error('Error:', err);
                    startButton.textContent = '❌ Error';
                }
            } else {
                source.disconnect();
                // Release the microphone; disconnecting the graph alone
                // keeps the mic live and the browser's recording
                // indicator on.
                stream.getTracks().forEach((track) => track.stop());
                cancelAnimationFrame(animationId);
                isActive = false;
                startButton.textContent = '🎵 Start';
                ctx.clearRect(0, 0, canvas.width, canvas.height);
            }
        }

        window.addEventListener('resize', setupCanvas);
        startButton.addEventListener('click', toggleAudio);
        setupCanvas();
        </script>
    </div>
    """

    # NOTE(review): emojis reconstructed from ISO-8859-7 mojibake in the
    # original (e.g. 'π΅' -> 🎵, 'β' -> ❌); the trailing title emoji was
    # truncated to 'π' and 🔍 is a best guess — confirm with the author.
    st.title("🎵 Deep Research Evaluator 🔍")
    st.markdown("### Audio Visualizer with Emojis")

    # Dark theme so the page background matches the visualizer panel.
    st.markdown("""
    <style>
    .stApp {
        background-color: #0e1117;
        color: white;
    }
    </style>
    """, unsafe_allow_html=True)

    # Height covers the 400px canvas plus the button and padding.
    components.html(html_code, height=500)
|
|
|
# Entry point when the module is executed directly (e.g. `streamlit run`):
# render the visualizer page.
if __name__ == "__main__":

    audio_visualizer()