# Streamlit app: emoji audio visualizer (microphone -> Web Audio -> canvas).
import streamlit as st
import streamlit.components.v1 as components
def audio_visualizer():
    """Render a Streamlit page with a microphone-driven audio visualizer.

    The visualizer is an embedded HTML/JS component: on button click it
    requests microphone access, feeds the stream into a Web Audio
    ``AnalyserNode``, and draws animated frequency bars plus music emojis
    on a canvas.

    Takes no parameters and returns ``None``; all output happens through
    Streamlit rendering side effects.
    """
    # HTML/JS payload rendered via components.html.
    # NOTE: the AudioContext is created *inside* the click handler — browser
    # autoplay policy suspends contexts created without a user gesture, which
    # previously left the analyser reading silence.
    html_code = """
    <div style="background: #1a1a1a; padding: 20px; border-radius: 10px;">
      <button id="startButton" style="padding: 10px 20px; background: #4CAF50; color: white; border: none; border-radius: 4px; cursor: pointer; font-size: 20px;">
        🎵 Start
      </button>
      <canvas id="visualizer" style="width: 100%; height: 400px;"></canvas>
      <script>
        const canvas = document.getElementById('visualizer');
        const ctx = canvas.getContext('2d');
        const startButton = document.getElementById('startButton');
        const emojis = ['🎵', '🎶', '🎸', '🎹', '🎺', '🎷', '🥁', '🎻'];
        let audioContext, analyser, dataArray, source, stream, animationId;
        let isActive = false;

        // Match the canvas backing store to its CSS box so bars aren't stretched.
        function setupCanvas() {
          canvas.width = canvas.offsetWidth;
          canvas.height = canvas.offsetHeight;
        }

        // Lazily build the audio graph on first use; must run from a user
        // gesture. resume() is a no-op if the context is already running.
        function ensureAudio() {
          if (!audioContext) {
            audioContext = new (window.AudioContext || window.webkitAudioContext)();
            analyser = audioContext.createAnalyser();
            analyser.fftSize = 256;
            dataArray = new Uint8Array(analyser.frequencyBinCount);
          }
          return audioContext.resume();
        }

        function draw() {
          if (!isActive) return;
          animationId = requestAnimationFrame(draw);
          analyser.getByteFrequencyData(dataArray);
          // Translucent fill leaves a motion trail instead of a hard clear.
          ctx.fillStyle = 'rgba(26, 26, 26, 0.2)';
          ctx.fillRect(0, 0, canvas.width, canvas.height);
          const barWidth = canvas.width / analyser.frequencyBinCount;
          dataArray.forEach((value, i) => {
            const percent = value / 255;
            const h = percent * canvas.height;
            const hue = (i / analyser.frequencyBinCount) * 360;
            ctx.fillStyle = `hsl(${hue}, 70%, 50%)`;
            ctx.fillRect(i * barWidth, canvas.height - h, barWidth - 1, h);
            // Sprinkle an emoji above every 16th bar, scaled by loudness.
            if (i % 16 === 0) {
              ctx.font = `${20 + percent * 20}px Arial`;
              ctx.fillText(
                emojis[i % emojis.length],
                i * barWidth,
                canvas.height - h - 30
              );
            }
          });
        }

        async function toggleAudio() {
          if (!isActive) {
            try {
              await ensureAudio();
              stream = await navigator.mediaDevices.getUserMedia({ audio: true });
              source = audioContext.createMediaStreamSource(stream);
              source.connect(analyser);
              isActive = true;
              startButton.textContent = '🎵 Stop';
              draw();
            } catch (err) {
              console.error('Error:', err);
              startButton.textContent = '❌ Error';
            }
          } else {
            source.disconnect();
            // Release the microphone so the browser's recording indicator
            // turns off; disconnecting the node alone does not stop capture.
            stream.getTracks().forEach((track) => track.stop());
            cancelAnimationFrame(animationId);
            isActive = false;
            startButton.textContent = '🎵 Start';
            ctx.clearRect(0, 0, canvas.width, canvas.height);
          }
        }

        window.addEventListener('resize', setupCanvas);
        startButton.addEventListener('click', toggleAudio);
        setupCanvas();
      </script>
    </div>
    """
    st.title("🎵 Deep Research Evaluator 🎶")
    st.markdown("### Audio Visualizer with Emojis")
    # Dark page theme to match the component's #1a1a1a background.
    st.markdown("""
    <style>
    .stApp {
        background-color: #0e1117;
        color: white;
    }
    </style>
    """, unsafe_allow_html=True)
    # 500px leaves room for the button row plus the 400px canvas.
    components.html(html_code, height=500)
if __name__ == "__main__":
    # Entry point when executed directly (e.g. `streamlit run app.py`).
    # Removed a stray trailing `|` artifact that made this line a syntax error.
    audio_visualizer()