// py-learn/src/app/chat/chat.component.ts
import { Component, Inject, OnInit, AfterViewInit, OnDestroy, PLATFORM_ID, ChangeDetectorRef, ViewChild, ElementRef, Renderer2, Output, EventEmitter } from '@angular/core';
import { ApiService } from './api.service';
import { FormsModule } from '@angular/forms';
import { CommonModule, isPlatformBrowser } from '@angular/common';
import { Router, RouterModule } from '@angular/router';
import { Subscription, lastValueFrom } from 'rxjs';
import { finalize } from 'rxjs/operators';
import { HttpClient } from '@angular/common/http';
import { HeaderComponent } from '../shared/header/header.component';
type Grade = 'lowergrade' | 'midgrade' | 'highergrade';
/** Shape of a single chat message bubble (user or AI). */
interface ChatMessage {
from: string;
text: string;
timestamp: string;
isPlaying?: boolean;
isMuted?: boolean;
suggestions?: string[];
source_ids?: string[];
videoUrl?: string;
audioUrl?: string;
playingVideoUrl?: string;
pending?: boolean;
isSynthesizing?: boolean;
isVideoSynthesizing?: boolean;
}
@Component({
selector: 'app-chat',
standalone: true,
imports: [FormsModule, CommonModule, RouterModule, HeaderComponent],
templateUrl: './chat.component.html',
styleUrls: ['./chat.component.css']
})
export class ChatComponent implements OnInit, AfterViewInit, OnDestroy {
@ViewChild('waveformCanvas') waveformCanvas!: ElementRef<HTMLCanvasElement>;
@ViewChild('popupTranscriptEl') popupTranscriptEl!: ElementRef<HTMLDivElement>;
@Output() transcriptConfirmed = new EventEmitter<string>();
isRecording = false;
showMicPopup = false;
popupTranscript = '';
errorMessage = '';
private recognition: any = null;
private _recordingFinalBuffer = '';
private _recordingInterimBuffer = '';
private audioContext: AudioContext | null = null;
private analyser: AnalyserNode | null = null;
private dataArray: Uint8Array | null = null;
private mediaStream: MediaStream | null = null;
private animationFrameId: number | null = null;
// restart helpers
private _recognitionActive = false;
private _restartTimer: any = null;
private _restartAttempts = 0;
private _maxRestartDelay = 1500;
showQuestions: boolean = false;
isSubmitting: boolean = false;
/** Generated (initial) or follow-up questions shown on focus */
pdfQuestions: string[] = [];
pdfLoading: boolean = false;
/** Chat state */
userInput: string = '';
messages: ChatMessage[] = [];
isTyping: boolean = false;
@ViewChild('chatBox') chatBox!: ElementRef;
/** Speech / mic state */
isLoadingSpeech: boolean = false;
selectedVoice: SpeechSynthesisVoice | null = null;
speechSynthesisInstance: SpeechSynthesisUtterance | null = null;
isListening: boolean = false;
isProcessingSpeech: boolean = false;
isSpeaking: boolean = false;
isAudioPaused: boolean = false;
/** Suggestions for typed input (powered by PDF too) */
suggestions: string[] = [];
isInputValid = false;
/** Subscriptions */
private responseSub?: Subscription;
/** Store last Q/A to drive follow-ups */
private lastQuestion: string | null = null;
private lastAnswer: string | null = null;
private lastSourceIds: string[] = [];
/** NEW: only allow follow-ups when last answer was grounded in textbook pages */
private lastAnswerHasContext: boolean = false;
// --- Multi-chat state: `this.messages` always points at the active chat's messages
private activeChatIndex = 0; // 0 = primary, 1 = secondary
// Per-chat storage (persist when switching)
private primaryMessages: ChatMessage[] = [];
private secondaryMessages: ChatMessage[] = [];
private primaryIsVideoEnabledIndex: boolean[] = [];
private secondaryIsVideoEnabledIndex: boolean[] = [];
private primaryLastQuestionContext = '';
private secondaryLastQuestionContext = '';
private primaryLastAnswerContext = '';
private secondaryLastAnswerContext = '';
private primaryLastSourceIdsContext: string[] = [];
private secondaryLastSourceIdsContext: string[] = [];
private primaryCurrentFollowups: string[] = [];
private secondaryCurrentFollowups: string[] = [];
private primaryPendingAiIndex: number | null = null;
private secondaryPendingAiIndex: number | null = null;
private primaryServerAudioMessageIndex: number | null = null;
private secondaryServerAudioMessageIndex: number | null = null;
private primaryIsReadingIndex: number | null = null;
private secondaryIsReadingIndex: number | null = null;
private primaryIsVideoPlayingIndex: number | null = null;
private secondaryIsVideoPlayingIndex: number | null = null;
// --- end multi-chat state
currentFollowups: string[] = [];
videoUrl = '';
aiResponseInterval: any = null;
isAiResponding = false;
isVideoEnabledIndex: boolean[] = [];
private currentExplainSub: Subscription | null = null;
private currentFollowupsSub: Subscription | null = null;
serverAudio: HTMLAudioElement | null = null;
serverAudioMessageIndex: number | null = null;
private pendingAiIndex: number | null = null;
isMuted = false;
showUserGuide = false;
openDropdownIndex: number | null = null;
isReadingIndex: number | null = null;
isVideoPlayingIndex: number | null = null;
private readAloudUtterance: SpeechSynthesisUtterance | null = null;
private lastQuestionContext: string = '';
private lastAnswerContext: string = '';
private lastSourceIdsContext: string[] = [];
private popupListeningMode = false;
private _savedRecognitionOnResult: any = null;
private _savedInterimResults = false;
private _suppressDefaultOnResult = false;
private shouldAutoScroll = true;
constructor(
private apiService: ApiService,
private cdr: ChangeDetectorRef,
@Inject(PLATFORM_ID) private platformId: object,
private http: HttpClient,
private router: Router,
private renderer: Renderer2
) {
// Initialize primary/secondary storage with the current defaults
this.primaryMessages = this.messages;
this.secondaryMessages = [];
this.primaryIsVideoEnabledIndex = this.isVideoEnabledIndex;
this.secondaryIsVideoEnabledIndex = [];
this.primaryLastQuestionContext = this.lastQuestionContext;
this.primaryLastAnswerContext = this.lastAnswerContext;
this.primaryLastSourceIdsContext = this.lastSourceIdsContext.slice();
this.primaryCurrentFollowups = this.currentFollowups.slice();
this.primaryPendingAiIndex = this.pendingAiIndex;
this.primaryServerAudioMessageIndex = this.serverAudioMessageIndex;
this.primaryIsReadingIndex = this.isReadingIndex;
this.primaryIsVideoPlayingIndex = this.isVideoPlayingIndex;
// secondary fields are already empty/default
if (!isPlatformBrowser(this.platformId)) return;
const SR = (window as any).SpeechRecognition || (window as any).webkitSpeechRecognition;
if (!SR) {
console.warn('SpeechRecognition not supported');
return;
}
this.recognition = new SR();
this.recognition.continuous = true;
this.recognition.interimResults = true;
this.recognition.lang = 'en-US';
try { this.recognition.maxAlternatives = 1; } catch { /* ignore */ }
this.recognition.onstart = () => { this._recognitionActive = true; this._restartAttempts = 0; };
this.recognition.onspeechstart = () => { this._recognitionActive = true; };
this.recognition.onspeechend = () => { this._recognitionActive = false; };
this.recognition.onresult = (event: any) => {
if (!this.isRecording) return;
let interim = '';
let final = '';
for (let i = event.resultIndex; i < event.results.length; i++) {
const res = event.results[i];
const t = (res && res[0] && res[0].transcript) ? res[0].transcript : '';
if (res.isFinal) final += t + ' ';
else interim += t + ' ';
}
if (final) {
this._recordingFinalBuffer += final;
this._recordingInterimBuffer = '';
} else {
this._recordingInterimBuffer = interim;
}
// keep UI quiet while recording; show only after Done pressed
this.cdr.detectChanges();
};
this.recognition.onerror = (e: any) => {
console.error('Recognition error', e);
if (e?.error === 'not-allowed') {
this.errorMessage = 'Microphone access denied';
this.isRecording = false;
} else {
this.errorMessage = `Error: ${e?.error || 'unknown'}`;
}
if (this.isRecording && (e?.error === 'no-speech' || e?.error === 'aborted' || e?.error === 'network')) {
if (this._restartTimer) clearTimeout(this._restartTimer);
const delay = Math.min(400 * (this._restartAttempts + 1), this._maxRestartDelay);
this._restartTimer = setTimeout(() => {
try { if (this.recognition && !this._recognitionActive) this.recognition.start(); } catch { this._restartAttempts++; }
}, delay);
} else {
if (e?.error !== 'not-allowed') this._recognitionActive = false;
}
this.cdr.detectChanges();
};
this.recognition.onend = () => {
this._recognitionActive = false;
if (this.isRecording && this.showMicPopup) {
if (this._restartTimer) clearTimeout(this._restartTimer);
const delay = Math.min(250 + (this._restartAttempts * 200), this._maxRestartDelay);
this._restartTimer = setTimeout(() => {
try { if (this.recognition && !this._recognitionActive) this.recognition.start(); } catch { this._restartAttempts++; }
}, delay);
}
};
this.recognition.onnomatch = () => { /* noop */ };
}
selectHardcodedQuestion(question: string): void {
try { console.log('[CHAT] default question selected:', question); } catch { }
this.showQuestions = false;
this.sendMessage(question);
this.userInput = '';
}
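/** Guarantee localStorage holds a valid gradeLevel, defaulting to 'lowergrade' when missing or invalid. */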
private ensureGradeLevel(defaultGrade: Grade = 'lowergrade'): void {
const g = (localStorage.getItem('gradeLevel') || '').toLowerCase();
if (g !== 'lowergrade' && g !== 'midgrade' && g !== 'highergrade') {
localStorage.setItem('gradeLevel', defaultGrade);
console.info('[Chat] gradeLevel not set; defaulted to', defaultGrade);
} else {
console.info('[Chat] gradeLevel =', g);
}
}
ngOnInit(): void {
if (!isPlatformBrowser(this.platformId)) return;
this.ensureGradeLevel();
this.loadToggleStates();
// Register the unload handler so speech synthesis is cancelled on page unload.
window.addEventListener('beforeunload', this.handleUnload);
if (window.speechSynthesis.onvoiceschanged !== undefined) {
window.speechSynthesis.onvoiceschanged = () => {
this.loadVoices();
};
}
this.loadVoices();
}
ngOnDestroy(): void {
if (this.currentExplainSub) { this.currentExplainSub.unsubscribe(); this.currentExplainSub = null; }
if (this.currentFollowupsSub) { this.currentFollowupsSub.unsubscribe(); this.currentFollowupsSub = null; }
this.responseSub?.unsubscribe();
if (this._restartTimer) { clearTimeout(this._restartTimer); this._restartTimer = null; }
if (this.aiResponseInterval) { clearInterval(this.aiResponseInterval); this.aiResponseInterval = null; }
try { this.recognition?.stop(); } catch { /* noop */ }
try { this.stopAnalyzer(); } catch { /* noop */ }
this.stopServerAudio();
if (isPlatformBrowser(this.platformId)) {
try { window.speechSynthesis?.cancel(); } catch { /* noop */ }
window.removeEventListener('beforeunload', this.handleUnload);
}
}
private handleUnload = (): void => {
if (window.speechSynthesis) {
window.speechSynthesis.cancel();
}
};
ngAfterViewInit() {
this.chatBox.nativeElement.addEventListener('scroll', () => {
const el = this.chatBox.nativeElement;
const atBottom = el.scrollHeight - el.clientHeight - el.scrollTop < 50;
this.shouldAutoScroll = atBottom;
});
}
scrollToBottom(): void {
if (this.shouldAutoScroll) {
try {
this.chatBox.nativeElement.scrollTo({
top: this.chatBox.nativeElement.scrollHeight,
behavior: 'smooth'
});
} catch { }
}
}
// --- Public API: toggle between chat A/B
toggleChat(): void {
const target = 1 - this.activeChatIndex;
this.switchToChat(target);
}
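/** Stop all playback, snapshot the current chat's state, then restore the target chat (0 = primary, 1 = secondary). */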
switchToChat(index: number): void {
if (index === this.activeChatIndex) return;
// Stop any playing media on the current chat first so saved state doesn't preserve "playing" flags
this.stopServerAudio();
try { window.speechSynthesis?.cancel(); } catch { /* noop */ }
this.stopAllVideo();
// Save current active chat state (now that media is stopped)
this.saveCurrentChatState(this.activeChatIndex);
// Restore target chat state
this.restoreChatState(index);
this.activeChatIndex = index;
this.cdr.detectChanges();
}
private saveCurrentChatState(idx: number): void {
// Make a shallow copy of messages and clear transient playback flags so the saved chat never
// preserves an "isPlaying" or inline-playing video state.
const sanitizedMessages = (this.messages || []).map(m => ({
...m,
isPlaying: false,
playingVideoUrl: '',
// keep pending / audio/video urls etc. — only clear playback flags
}));
if (idx === 0) {
this.primaryMessages = sanitizedMessages;
this.primaryIsVideoEnabledIndex = (this.isVideoEnabledIndex || []).slice();
this.primaryLastQuestionContext = this.lastQuestionContext;
this.primaryLastAnswerContext = this.lastAnswerContext;
this.primaryLastSourceIdsContext = this.lastSourceIdsContext.slice();
this.primaryCurrentFollowups = this.currentFollowups.slice();
this.primaryPendingAiIndex = this.pendingAiIndex;
// serverAudioMessageIndex should be null because we stopped audio before saving
this.primaryServerAudioMessageIndex = null;
this.primaryIsReadingIndex = null;
this.primaryIsVideoPlayingIndex = null;
} else {
this.secondaryMessages = sanitizedMessages;
this.secondaryIsVideoEnabledIndex = (this.isVideoEnabledIndex || []).slice();
this.secondaryLastQuestionContext = this.lastQuestionContext;
this.secondaryLastAnswerContext = this.lastAnswerContext;
this.secondaryLastSourceIdsContext = this.lastSourceIdsContext.slice();
this.secondaryCurrentFollowups = this.currentFollowups.slice();
this.secondaryPendingAiIndex = this.pendingAiIndex;
this.secondaryServerAudioMessageIndex = null;
this.secondaryIsReadingIndex = null;
this.secondaryIsVideoPlayingIndex = null;
}
}
private restoreChatState(idx: number): void {
if (idx === 0) {
this.messages = this.primaryMessages || [];
this.isVideoEnabledIndex = this.primaryIsVideoEnabledIndex || [];
this.lastQuestionContext = this.primaryLastQuestionContext || '';
this.lastAnswerContext = this.primaryLastAnswerContext || '';
this.lastSourceIdsContext = (this.primaryLastSourceIdsContext || []).slice();
this.currentFollowups = (this.primaryCurrentFollowups || []).slice();
this.pendingAiIndex = this.primaryPendingAiIndex;
this.serverAudioMessageIndex = this.primaryServerAudioMessageIndex;
this.isReadingIndex = this.primaryIsReadingIndex;
this.isVideoPlayingIndex = this.primaryIsVideoPlayingIndex;
} else {
this.messages = this.secondaryMessages || [];
this.isVideoEnabledIndex = this.secondaryIsVideoEnabledIndex || [];
this.lastQuestionContext = this.secondaryLastQuestionContext || '';
this.lastAnswerContext = this.secondaryLastAnswerContext || '';
this.lastSourceIdsContext = (this.secondaryLastSourceIdsContext || []).slice();
this.currentFollowups = (this.secondaryCurrentFollowups || []).slice();
this.pendingAiIndex = this.secondaryPendingAiIndex;
this.serverAudioMessageIndex = this.secondaryServerAudioMessageIndex;
this.isReadingIndex = this.secondaryIsReadingIndex;
this.isVideoPlayingIndex = this.secondaryIsVideoPlayingIndex;
}
// Ensure arrays exist to avoid undefined errors elsewhere
if (!this.messages) this.messages = [];
if (!this.isVideoEnabledIndex) this.isVideoEnabledIndex = [];
if (!this.currentFollowups) this.currentFollowups = [];
}
startFromPopup(): void {
this._suppressDefaultOnResult = true;
try { if (this.recognition && typeof this.recognition.stop === 'function') this.recognition.stop(); } catch { }
this._restoreRecognitionHandlers();
this.isListening = false;
this.showMicPopup = false;
const message = (this.popupTranscript || '').trim();
this.popupTranscript = '';
if (!message) { this.errorMessage = 'No speech captured. Please try again.'; this.cdr.detectChanges(); return; }
this.sendMessage(message);
}
/** Show questions on focus: initial (no answer yet) or follow-ups (after an answer) */
showHardcodedQuestions(): void {
setTimeout(() => {
this.showQuestions = true;
// NEW: only fetch follow-ups if last answer had grounded textbook context
if (this.lastAnswer && this.lastAnswerHasContext) {
this.fetchFollowupQuestions();
} else {
this.fetchInitialQuestions();
}
}, 100);
}
hideHardcodedQuestions(): void {
setTimeout(() => {
this.showQuestions = false;
}, 200);
}
/** Initial questions generated from PDFs (topicless OPEN) */
private fetchInitialQuestions(n: number = 5): void {
this.pdfLoading = true;
this.pdfQuestions = [];
this.apiService.generateOpenQuestions({ qtype: 'OPEN', n, topic: '' })
.subscribe({
next: (resp) => {
const items = Array.isArray(resp?.questions) ? resp.questions : [];
this.pdfQuestions = items.map((q: any) => typeof q === 'string' ? q : (q?.question || '')).filter(Boolean);
if (!this.pdfQuestions.length && resp?.note) {
console.warn('Question generator note:', resp.note);
}
this.pdfLoading = false;
this.cdr.detectChanges();
},
error: () => { this.pdfLoading = false; this.pdfQuestions = []; this.cdr.detectChanges(); }
});
}
/** Follow-ups after an answer */
private fetchFollowupQuestions(n: number = 5): void {
// NEW: guard — if no grounded context, fall back to initial questions
if (!this.lastQuestion || !this.lastAnswer || !this.lastAnswerHasContext || !this.lastSourceIds.length) {
this.fetchInitialQuestions(n);
return;
}
this.pdfLoading = true;
this.pdfQuestions = [];
// NEW: pass source_ids so backend keeps follow-ups within same pages/section
this.apiService.suggestFollowups({
last_question: this.lastQuestion,
last_answer: this.lastAnswer,
n,
// cast avoids TS excess property checks if your service type has not been widened yet
source_ids: this.lastSourceIds
} as any).subscribe({
next: (resp) => {
const list = Array.isArray(resp?.suggestions) ? resp.suggestions : [];
this.pdfQuestions = list.filter((s: string) => !!s);
this.pdfLoading = false;
this.cdr.detectChanges();
},
error: () => {
this.pdfLoading = false;
this.pdfQuestions = [];
this.cdr.detectChanges();
}
});
}
/** Click on one generated question */
selectGeneratedQuestion(question: string): void {
this.userInput = question;
this.showQuestions = false;
setTimeout(() => {
this.sendMessage();
this.userInput = '';
}, 80);
}
/** Type-ahead suggestions sourced from PDFs (OPEN on the typed topic) */
getSuggestions(): void {
if (!this.userInput || this.userInput.trim().length < 1 || this.isSpeaking) {
this.suggestions = [];
return;
}
this.apiService.generateOpenQuestions({ qtype: 'OPEN', n: 5, topic: this.userInput })
.subscribe({
next: (resp) => {
const items = Array.isArray(resp?.questions) ? resp.questions : [];
this.suggestions = items
.map((q: any) => (typeof q === 'string' ? q : (q?.question || '')))
.filter((s: string) => !!s);
},
error: () => { this.suggestions = []; }
});
}
selectSuggestion(suggestion: string): void {
this.userInput = suggestion;
this.suggestions = [];
this.sendMessage();
}
/** Send question to backend for an answer */
sendMessage(inputText?: string): void {
const message = inputText ? inputText.trim() : this.userInput.trim();
if (!message) return;
this.isSubmitting = true;
const timestamp = new Date().toLocaleTimeString();
this.messages.push({ from: 'user', text: message, timestamp });
this.userInput = '';
this.isTyping = true;
this.cdr.detectChanges();
this.shouldAutoScroll = true;
this.scrollToBottom();
this.responseSub = this.apiService.explainGrammar({
question: message,
// ask backend to synthesize audio/video only when user toggles are ON
synthesize_audio: Boolean(this.isVoiceEnabled),
synthesize_video: Boolean(this.isTutorEnabled)
})
.pipe(finalize(() => {
this.isSubmitting = false;
}))
.subscribe({
next: (response) => {
this.isTyping = false;
const explanation =
(response?.answer || response?.response || response?.text || 'No explanation available.').trim();
const sourceIds: string[] = Array.isArray(response?.source_ids)
? response.source_ids.filter((s: any) => typeof s === 'string' && s.trim().length > 0)
: [];
const audioUrl = (response?.audio_url || response?.audioUrl || '') as string;
const videoUrl = (response?.video_url || response?.videoUrl || '') as string;
// Store question + source ids *now*.
this.lastQuestion = message;
this.lastSourceIds = sourceIds;
const notFound = /No information available in the provided textbook content/i.test(explanation);
const hasContext = !!sourceIds.length && !notFound;
// Pass URLs along to the stream/attach to message
this.streamAiAnswer(explanation, sourceIds, hasContext, audioUrl || undefined, videoUrl || undefined);
},
error: (err) => {
console.error('API Error:', err);
this.isTyping = false;
const errorMessage = 'Error: Could not get a response from the server.';
this.streamAiAnswer(errorMessage, [], false);
}
});
}
/** Show AI answer word-by-word and start audio */
private streamAiAnswer(explanation: string, sourceIds: string[], hasContext: boolean, audioUrl?: string, videoUrl?: string): void {
const text = (explanation || '').trim() || 'No explanation available.';
const timestamp = new Date().toLocaleTimeString();
// Create an empty AI message first
const aiIndex = this.messages.push({
from: 'ai',
text: '',
timestamp,
source_ids: sourceIds,
pending: true,
audioUrl: audioUrl || '',
videoUrl: videoUrl || '',
playingVideoUrl: ''
} as any) - 1;
// ensure video-enabled index has an entry for this message
if (this.isVideoEnabledIndex.length <= aiIndex) {
this.isVideoEnabledIndex[aiIndex] = false;
}
this.isAiResponding = true;
this.shouldAutoScroll = true;
this.cdr.detectChanges();
// Animate the text word by word
this.animateAiResponse(text, aiIndex, () => {
// When streaming is finished, we finally store lastAnswer
this.lastAnswer = text;
this.lastAnswerHasContext = hasContext;
// After the AI text finishes streaming, automatically play server media when toggles are enabled.
// If both audio and video are present and both toggles are enabled, video takes precedence.
this.autoPlayMediaForMessage(aiIndex);
});
// Only run client-side TTS when there is no server-provided audio URL.
if (!audioUrl && this.isVoiceEnabled) {
this.speakResponse(text);
}
}
/**
* Decide and start playback for a message that already has `audioUrl` and/or `videoUrl`.
* Behavior:
* - If both videoUrl && audioUrl && both video and audio toggles are ON -> play video (video wins)
* - Else if videoUrl && video toggle ON -> play video
* - Else if audioUrl && voice toggle ON -> play audio
*/
private autoPlayMediaForMessage(index: number): void {
const msg = this.messages[index] as any;
if (!msg) return;
const hasVideo = !!(msg.videoUrl && msg.videoUrl.trim());
const hasAudio = !!(msg.audioUrl && msg.audioUrl.trim());
// If both present and user enabled both, prefer video
if (hasVideo && this.isTutorEnabled) {
try { this.stopServerAudio(); } catch { /* noop */ }
try { window.speechSynthesis?.cancel(); } catch { /* noop */ }
// request autoplay (will mute to allow autoplay)
this.openMessageVideo(index, true);
return;
}
// If video present but tutor (video) is not enabled, do not auto-play it.
// Only auto-play audio if user enabled voice
if (hasAudio && this.isVoiceEnabled) {
// play server audio (this will stop videos if any)
// ensure inline videos are stopped
try { this.stopAllVideo(); } catch { /* noop */ }
// If message already has audioUrl, play it
this.playServerAudioForMessage(index);
return;
}
// If no server media played and no TTS, do nothing (user may manually click synth).
}
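/** Shorten a source tag for display, e.g. (illustrative) "books/grammar.pdf#p12" -> "grammar.pdf p12". */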
displaySource(tag: string): string {
if (!tag) return '';
const [path, pagePart] = tag.split('#p');
const file = path.split(/[/\\]/).pop() || path;
return pagePart ? `${file} p${pagePart}` : file;
}
/** Render helpers */
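// Illustrative example: "**Nouns**\n1. dog" -> "<b>Nouns</b><br><b>1.</b> dog"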
formatStructuredResponse(text: string): string {
// Apply bold/list markers first (anchored to line starts so mid-sentence
// hyphens and numbers are left alone), then convert newlines last.
return text
.replace(/(\*\*)(.*?)\1/g, '<b>$2</b>')
.replace(/^(\d+)\.\s/gm, '<b>$1.</b> ')
.replace(/^•\s/gm, '✔️ ')
.replace(/^-\s/gm, '🔹 ')
.replace(/\n/g, '<br>');
}
/** TTS helpers + typing animation */
animateAiResponse(
responseText: string,
targetIndex?: number,
onDone?: () => void
): void {
if (!responseText) {
this.isAiResponding = false;
return;
}
// Find or create the AI message to animate into
let aiIndex: number | null = null;
if (typeof targetIndex === 'number' &&
this.messages[targetIndex] &&
this.messages[targetIndex].from === 'ai') {
aiIndex = targetIndex;
} else {
for (let i = this.messages.length - 1; i >= 0; i--) {
if (this.messages[i].from === 'ai') { aiIndex = i; break; }
}
}
if (aiIndex === null || aiIndex < 0 || !this.messages[aiIndex]) {
this.messages.push({
from: 'ai',
text: '',
timestamp: new Date().toLocaleTimeString()
} as any);
aiIndex = this.messages.length - 1;
this.isVideoEnabledIndex.push(false);
}
const aiMsg = this.messages[aiIndex] as any;
if (this.aiResponseInterval) {
clearInterval(this.aiResponseInterval);
this.aiResponseInterval = null;
}
aiMsg.text = '';
aiMsg.pending = true;
this.isAiResponding = true;
this.cdr.detectChanges();
const words = responseText.split(/\s+/).filter(w => w.length);
let idx = 0;
// 1 word every 200 ms – you can tune this
const speedMs = 200;
this.aiResponseInterval = setInterval(() => {
if (idx < words.length) {
aiMsg.text = words.slice(0, idx + 1).join(' ');
idx++;
this.cdr.detectChanges();
this.scrollToBottom(); // keep view at bottom while streaming
} else {
clearInterval(this.aiResponseInterval!);
this.aiResponseInterval = null;
aiMsg.text = responseText;
aiMsg.pending = false;
this.isAiResponding = false;
if (onDone) {
onDone();
}
this.cdr.detectChanges();
this.scrollToBottom();
}
}, speedMs);
}
stopAiResponse(): void {
if (this.currentExplainSub) { this.currentExplainSub.unsubscribe(); this.currentExplainSub = null; }
if (this.currentFollowupsSub) { this.currentFollowupsSub.unsubscribe(); this.currentFollowupsSub = null; }
if (this.aiResponseInterval) { clearInterval(this.aiResponseInterval); this.aiResponseInterval = null; }
this.stopServerAudio();
try { window.speechSynthesis?.cancel(); } catch { /* noop */ }
if (this.pendingAiIndex !== null && this.messages[this.pendingAiIndex] && this.messages[this.pendingAiIndex].from === 'ai') {
const msg = this.messages[this.pendingAiIndex];
msg.text = 'Response cancelled.';
msg.timestamp = new Date().toLocaleTimeString();
msg.suggestions = [];
msg.audioUrl = '';
msg.videoUrl = '';
msg.playingVideoUrl = '';
msg.pending = false;
if (this.isVideoEnabledIndex.length > this.pendingAiIndex) this.isVideoEnabledIndex[this.pendingAiIndex] = false;
this.pendingAiIndex = null;
} else {
const revIndex = [...this.messages].reverse().findIndex(m => m.from === 'ai');
if (revIndex !== -1) {
const actualIndex = this.messages.length - 1 - revIndex;
const msg = this.messages[actualIndex];
msg.text = 'Response cancelled.';
msg.timestamp = new Date().toLocaleTimeString();
msg.suggestions = [];
msg.audioUrl = '';
msg.videoUrl = '';
msg.playingVideoUrl = '';
msg.pending = false;
if (this.isVideoEnabledIndex.length > actualIndex) this.isVideoEnabledIndex[actualIndex] = false;
} else {
this.messages.push({ from: 'ai', text: 'Response cancelled.', timestamp: new Date().toLocaleTimeString() });
this.isVideoEnabledIndex.push(false);
}
}
this.isAiResponding = false;
this.isTyping = false;
this.isSpeaking = false;
this.isReadingIndex = null;
this.cdr.detectChanges();
}
speakResponse(responseText: string): void {
if (!responseText) return;
// Only speak when the Voice toggle is ON
if (!this.isVoiceEnabled) return;
this.stopAllVideo();
const speech = new SpeechSynthesisUtterance();
speech.text = responseText;
speech.lang = 'en-US';
speech.pitch = 1;
speech.rate = 1;
this.isSpeaking = true;
const voices = window.speechSynthesis.getVoices();
const preferred = [
'Google UK English Female',
'Google US English Female',
'Microsoft Zira - English (United States)',
'Microsoft Hazel - English (United Kingdom)',
'Google en-GB Female',
'Google en-US Female'
];
for (const n of preferred) {
const found = voices.find(v => v.name === n);
if (found) { speech.voice = found; break; }
}
if (!speech.voice && voices.length) speech.voice = voices[0];
speech.onend = () => {
this.isSpeaking = false;
this.cdr.detectChanges();
};
try {
window.speechSynthesis.speak(speech);
} catch {
this.isSpeaking = false;
}
}
resumeAudio(): void {
if (this.serverAudio && this.serverAudio.paused) {
this.serverAudio.play().catch(err => console.error('Audio resume failed:', err));
this.isAudioPaused = false;
if (this.serverAudioMessageIndex !== null) this.messages[this.serverAudioMessageIndex].isPlaying = true;
this.cdr.detectChanges();
return;
}
if (window.speechSynthesis && window.speechSynthesis.paused) {
window.speechSynthesis.resume();
this.isAudioPaused = false;
this.cdr.detectChanges();
}
}
playServerAudioForMessage(index: number): void {
const msg = this.messages[index] as any;
if (!msg || !msg.audioUrl) return;
// If same message is clicked while its audio element exists: toggle play/pause
if (this.serverAudio && this.serverAudioMessageIndex === index) {
if (!this.serverAudio.paused) {
this.serverAudio.pause();
this.isAudioPaused = true;
msg.isPlaying = false;
} else {
this.serverAudio.play().catch(err => {
console.error('Audio resume failed:', err);
});
this.isAudioPaused = false;
msg.isPlaying = true;
}
this.cdr.detectChanges();
return;
}
// Stop any inline videos (pauses DOM video elements) before starting audio
this.stopAllVideo();
// Stop any existing audio and speech synthesis
this.stopServerAudio();
try { window.speechSynthesis?.cancel(); } catch { /* noop */ }
// NOTE: Do NOT change global navbar toggles here.
// Previously this method forced `isMuted = false` and `isVoiceEnabled = true`
// which caused the navbar audio icon to flip when playing per-message audio.
// We intentionally avoid modifying those global flags so per-message playback
// doesn't affect the navbar state.
// Create and play new audio
this.serverAudio = new Audio(msg.audioUrl);
this.serverAudioMessageIndex = index;
try { this.serverAudio.volume = this.isMuted ? 0 : 1; } catch { /* noop */ }
this.isReadingIndex = index;
this.isAudioPaused = false;
msg.isPlaying = true;
this.isSpeaking = true;
this.cdr.detectChanges();
this.serverAudio.onended = () => {
try { msg.isPlaying = false; } catch { /* noop */ }
this.isReadingIndex = null;
this.serverAudio = null;
this.serverAudioMessageIndex = null;
this.isSpeaking = false;
this.cdr.detectChanges();
};
this.serverAudio.onerror = (e) => {
console.error('Server audio playback error', e);
try { msg.isPlaying = false; } catch { /* noop */ }
this.isReadingIndex = null;
this.serverAudio = null;
this.serverAudioMessageIndex = null;
this.isSpeaking = false;
this.cdr.detectChanges();
};
this.serverAudio.play().catch(err => {
console.error('Audio play failed:', err);
try { msg.isPlaying = false; } catch { /* noop */ }
this.isReadingIndex = null;
this.serverAudio = null;
this.serverAudioMessageIndex = null;
this.isSpeaking = false;
this.cdr.detectChanges();
});
}
private stopServerAudio(): void {
if (this.serverAudio) {
try { this.serverAudio.pause(); this.serverAudio.currentTime = 0; } catch { }
this.serverAudio = null;
if (this.serverAudioMessageIndex !== null && this.messages[this.serverAudioMessageIndex]) {
this.messages[this.serverAudioMessageIndex].isPlaying = false;
}
this.serverAudioMessageIndex = null;
}
this.isReadingIndex = null;
}
synthesizeAudioAndPlay(index: number): void {
const msg = this.messages[index] as any;
if (!msg || !msg.text) return;
if (msg.audioUrl) { this.playServerAudioForMessage(index); return; }
if (msg.isSynthesizing) return;
msg.isSynthesizing = true; this.cdr.detectChanges();
this.apiService.synthesizeAudio(msg.text).subscribe({
next: (res: any) => {
msg.isSynthesizing = false;
if (res?.audio_url) { msg.audioUrl = res.audio_url; this.playServerAudioForMessage(index); }
else { this.errorMessage = 'Audio generation failed.'; }
this.cdr.detectChanges();
},
error: (err) => { console.error('Audio synth API error', err); msg.isSynthesizing = false; this.errorMessage = 'Audio generation failed.'; this.cdr.detectChanges(); }
});
}
synthesizeVideoAndPlay(index: number): void {
const msg = this.messages[index] as any;
if (!msg || !msg.text) return;
if (msg.videoUrl) {
this.openMessageVideo(index);
return;
}
if (msg.isVideoSynthesizing) return;
msg.isVideoSynthesizing = true;
this.cdr.detectChanges();
this.apiService.synthesizeVideo(msg.text).subscribe({
next: (res: any) => {
msg.isVideoSynthesizing = false;
if (res?.video_url) {
msg.videoUrl = res.video_url;
this.openMessageVideo(index);
} else {
console.error('No video_url returned from synth API:', res);
this.errorMessage = 'Video generation failed.';
}
this.cdr.detectChanges();
},
error: (err) => {
msg.isVideoSynthesizing = false;
console.error('Video synth API error', err);
this.errorMessage = 'Video generation failed. Try again.';
this.cdr.detectChanges();
}
});
}
openMessageVideo(i: number, autoPlay: boolean = false): void {
const msg = this.messages[i] as any;
if (!msg?.videoUrl) return;
if (this.isVideoPlayingIndex === i) {
this.stopInlineVideo(i);
return;
}
// Stop any server audio and spoken TTS before playing video
this.stopServerAudio();
try { window.speechSynthesis?.cancel(); } catch { /* noop */ }
// Stop other videos (will also pause DOM video elements)
this.stopAllVideo();
msg.playingVideoUrl = msg.videoUrl;
this.isVideoEnabledIndex[i] = true;
this.cdr.detectChanges();
setTimeout(() => {
const vid = document.getElementById(`inline-video-${i}`) as HTMLVideoElement | null;
if (!vid) { this.isVideoPlayingIndex = null; this.cdr.detectChanges(); return; }
// If autoplay requested, mute to satisfy browser autoplay policies.
if (autoPlay) {
try { vid.muted = true; } catch { /* noop */ }
} else {
try { vid.muted = false; } catch { /* noop */ }
}
vid.onplay = () => { this.isVideoPlayingIndex = i; this.cdr.detectChanges(); };
vid.onpause = () => { this.cdr.detectChanges(); };
vid.onended = () => { this.onMessageVideoEnded(i); };
// Attempt to play; if blocked, clear playing state
vid.play().catch(err => {
console.warn('Inline video play blocked/failed:', err);
this.isVideoPlayingIndex = null;
msg.playingVideoUrl = '';
this.isVideoEnabledIndex[i] = false;
this.cdr.detectChanges();
});
}, 50);
}
// New toggle helper called from the template button. It uses existing open/stop helpers.
toggleMessageVideo(i: number): void {
const msg = this.messages[i] as any;
if (!msg || !msg.videoUrl) return;
if (!this.isVideoEnabledIndex[i]) {
this.openMessageVideo(i);
} else {
this.stopInlineVideo(i);
}
}
stopInlineVideo(index: number): void {
const vid = document.getElementById(`inline-video-${index}`) as HTMLVideoElement | null;
if (vid) {
try { vid.pause(); } catch { /* noop */ }
try { vid.currentTime = 0; } catch { /* noop */ }
}
const msg = this.messages[index] as any;
if (msg) msg.playingVideoUrl = '';
this.isVideoEnabledIndex[index] = false;
if (this.isVideoPlayingIndex === index) this.isVideoPlayingIndex = null;
this.cdr.detectChanges();
}
onMessageVideoEnded(i: number): void {
const msg = this.messages[i] as any;
if (msg) msg.playingVideoUrl = '';
this.isVideoEnabledIndex[i] = false;
if (this.isVideoPlayingIndex === i) this.isVideoPlayingIndex = null;
this.cdr.detectChanges();
}
loadVoices(): void {
const voices = window.speechSynthesis.getVoices();
if (!voices.length) {
setTimeout(() => this.loadVoices(), 500);
return;
}
const preferred = [
'Google UK English Female',
'Google US English Female',
'Microsoft Zira - English (United States)',
'Microsoft Hazel - English (United Kingdom)',
'Google en-GB Female',
'Google en-US Female'
];
for (const name of preferred) {
const v = voices.find(voice => voice.name === name);
if (v) { this.selectedVoice = v; break; }
}
if (!this.selectedVoice) {
this.selectedVoice = voices.find(voice => voice.name.toLowerCase().includes('female')) || voices[0];
}
}
/** Global audio controls */
pauseAudio(): void {
if (window.speechSynthesis.speaking && !window.speechSynthesis.paused) {
window.speechSynthesis.pause();
this.isAudioPaused = true;
this.cdr.detectChanges();
}
}
stopListening(): void {
this.isListening = false;
if (this.recognition) this.recognition.stop();
}
/** Per-message read toggle */
toggleAudio(message: { text: string, isPlaying?: boolean }): void {
if (this.speechSynthesisInstance && this.speechSynthesisInstance.text === message.text) {
if (message.isPlaying) {
window.speechSynthesis.pause();
message.isPlaying = false;
} else {
window.speechSynthesis.resume();
message.isPlaying = true;
}
} else {
if (this.speechSynthesisInstance) window.speechSynthesis.cancel();
this.messages.forEach((m) => (m.isPlaying = false));
message.isPlaying = true;
this.speechSynthesisInstance = new SpeechSynthesisUtterance(message.text);
this.speechSynthesisInstance.lang = 'en-US';
this.speechSynthesisInstance.pitch = 1;
this.speechSynthesisInstance.rate = 1;
this.speechSynthesisInstance.onend = () => {
message.isPlaying = false;
this.speechSynthesisInstance = null;
};
window.speechSynthesis.speak(this.speechSynthesisInstance);
}
}
// UI / input helpers
goToHome(): void { this.router.navigate(['/home']); }
copySuccessIndex: number | null = null;
copyToClipboard(text: string, index: number): void {
navigator.clipboard.writeText(text).then(() => {
this.copySuccessIndex = index;
setTimeout(() => { this.copySuccessIndex = null; }, 2000);
}).catch(err => console.error('Failed to copy:', err));
}
checkInput = (): void => { this.isInputValid = this.userInput.trim().length > 0; }
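/**
* Composite send/mic button. Priority: cancel a streaming AI response,
* then send typed text, then pause/resume TTS, else open the mic popup.
*/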
handleButtonClick(): void {
// If AI is currently streaming/responding, clicking the button should cancel that response.
if (this.isAiResponding) {
this.stopAiResponse();
return;
}
if (this.userInput.trim().length > 0) {
this.showQuestions = false;
const msg = this.userInput;
this.userInput = '';
this.sendMessage(msg);
} else if (this.isSpeaking && !this.serverAudio) {
this.pauseAudio();
} else if (this.isAudioPaused) {
this.resumeAudio();
} else {
this.openMicrophonePopup();
}
}
startListening(): void {
this.isListening = true;
this.isProcessingSpeech = false;
if (navigator.mediaDevices?.getUserMedia) {
navigator.mediaDevices.getUserMedia({ audio: true })
.then(() => {
if (this.recognition) {
this.recognition.start();
this.recognition.onresult = (event: any) => {
if (event.results && event.results[0]) {
const transcript = event.results[0][0].transcript.trim();
this.userInput = transcript;
if (this.userInput.trim()) this.sendMessage();
this.recognition.stop();
this.isListening = false;
}
};
this.recognition.onnomatch = () => alert('No speech detected. Please try again.');
this.recognition.onend = () => { this.isListening = false; };
this.recognition.onerror = (error: any) => {
console.error('Speech Recognition Error:', error);
this.isListening = false;
if (error.error === 'not-allowed') alert('Microphone permission denied.');
};
} else {
alert('Speech Recognition not supported in this browser.');
}
}).catch((error) => {
console.error('Microphone access denied:', error);
this.errorMessage = 'Please enable microphone access to use this feature.';
this.isListening = false;
});
} else {
alert('Microphone access not supported in this browser.');
}
}
private stopPopupListening(): void {
try {
if (this.recognition && typeof this.recognition.stop === 'function') {
this.recognition.stop();
}
} catch (e) {
console.warn('Error stopping popup recognition:', e);
}
this._restoreRecognitionHandlers();
this.isListening = false;
this.cdr.detectChanges();
}
private startPopupListening(): void {
this.isListening = true;
this.isProcessingSpeech = false;
this.popupTranscript = '';
this.errorMessage = '';
if (!this.recognition) {
this.errorMessage = 'Speech Recognition not available in this browser.';
this.isListening = false;
this.cdr.detectChanges();
return;
}
this._savedRecognitionOnResult = this.recognition.onresult;
this._savedInterimResults = Boolean(this.recognition.interimResults);
this.recognition.interimResults = true;
this.popupListeningMode = true;
if (navigator.mediaDevices?.getUserMedia) {
navigator.mediaDevices.getUserMedia({ audio: true })
.then(() => {
try {
this.recognition.onresult = (event: any) => {
let interim = '';
let final = '';
for (let i = event.resultIndex; i < event.results.length; i++) {
const res = event.results[i];
if (res.isFinal) final += res[0].transcript + ' ';
else interim += res[0].transcript + ' ';
}
const display = (final + interim).trim();
this.popupTranscript = display;
this.cdr.detectChanges();
if (final && final.trim()) {
try { this.recognition.stop(); } catch { /* noop */ }
this.isListening = false;
this.cdr.detectChanges();
}
};
this.recognition.onnomatch = () => {
this.errorMessage = 'No speech detected. Please try again.';
this.isListening = false;
this.cdr.detectChanges();
};
this.recognition.onend = () => {
this.isListening = false;
this.popupListeningMode = false;
this._restoreRecognitionHandlers();
this.cdr.detectChanges();
};
this.recognition.onerror = (error: any) => {
console.error('Popup Speech Recognition Error:', error);
this.errorMessage = 'Speech recognition error.';
this.isListening = false;
this.popupListeningMode = false;
this._restoreRecognitionHandlers();
this.cdr.detectChanges();
};
this.recognition.start();
} catch (err) {
console.error('startPopupListening start error:', err);
this.errorMessage = 'Unable to start speech recognition.';
this.isListening = false;
this.popupListeningMode = false;
this._restoreRecognitionHandlers();
this.cdr.detectChanges();
}
}).catch((error) => {
console.error('Microphone access denied for popup:', error);
this.errorMessage = 'Please enable microphone access to use this feature.';
this.isListening = false;
this.popupListeningMode = false;
this._restoreRecognitionHandlers();
this.cdr.detectChanges();
});
} else {
this.errorMessage = 'Microphone access not supported in this browser.';
this.isListening = false;
this.popupListeningMode = false;
this._restoreRecognitionHandlers();
this.cdr.detectChanges();
}
}
private _restoreRecognitionHandlers(): void {
try {
if (!this.recognition) return;
if (this._savedRecognitionOnResult) {
this.recognition.onresult = this._savedRecognitionOnResult;
this._savedRecognitionOnResult = null;
}
this.recognition.interimResults = Boolean(this._savedInterimResults);
this._savedInterimResults = false;
this.popupListeningMode = false;
} catch (err) {
console.warn('Error restoring recognition handlers:', err);
}
}
addNewLine(event: KeyboardEvent): void {
if (event.key === 'Enter' && event.shiftKey) {
event.preventDefault();
this.userInput += '\n';
}
}
adjustTextareaHeight(event: Event): void {
const textarea = event.target as HTMLTextAreaElement;
textarea.style.height = 'auto';
textarea.style.height = `${textarea.scrollHeight}px`;
}
handleEnterPress(event: KeyboardEvent): void {
if (this.isSpeaking && !this.serverAudio) { event.preventDefault(); return; }
if (event.key === 'Enter') {
if (!event.shiftKey) {
event.preventDefault();
this.handleButtonClick();
} else {
event.preventDefault();
this.userInput += '\n';
}
}
}
isVoiceEnabled = false;
isTutorEnabled = false;
isSyllabusEnabled = true;
isBreadcrumbEnabled = false;
toggleVoice(): void {
// Toggle voice state independently of tutor/video, and persist it like the other toggles.
this.isVoiceEnabled = !this.isVoiceEnabled;
this.saveToggleStates();
}
toggleTutor(): void {
// Toggle the global tutor/video mode and persist it.
this.isTutorEnabled = !this.isTutorEnabled;
this.saveToggleStates();
}
toggleSyllabus(): void { this.isSyllabusEnabled = !this.isSyllabusEnabled; this.saveToggleStates(); }
toggleBreadcrumb(): void { this.isBreadcrumbEnabled = !this.isBreadcrumbEnabled; this.saveToggleStates(); }
private playGlobalVideoFromLatest(): void {
const idx = [...this.messages].reverse().findIndex(m => m.from === 'ai' && m.videoUrl);
if (idx === -1) { this.videoUrl = ''; return; }
const actualIndex = this.messages.length - 1 - idx;
const msg = this.messages[actualIndex];
this.videoUrl = msg.videoUrl || '';
this.cdr.detectChanges();
}
private saveToggleStates(): void {
if (isPlatformBrowser(this.platformId)) {
localStorage.setItem('voiceEnabled', String(this.isVoiceEnabled));
localStorage.setItem('tutorEnabled', String(this.isTutorEnabled));
localStorage.setItem('syllabusEnabled', String(this.isSyllabusEnabled));
localStorage.setItem('breadcrumbEnabled', String(this.isBreadcrumbEnabled));
}
}
private loadToggleStates(): void {
if (isPlatformBrowser(this.platformId)) {
this.isVoiceEnabled = localStorage.getItem('voiceEnabled') === 'true';
this.isTutorEnabled = localStorage.getItem('tutorEnabled') === 'true';
this.isSyllabusEnabled = localStorage.getItem('syllabusEnabled') !== 'false';
this.isBreadcrumbEnabled = localStorage.getItem('breadcrumbEnabled') === 'true';
}
}
private stopVoiceOnly(): void {
try { if (window.speechSynthesis.speaking || window.speechSynthesis.paused) window.speechSynthesis.cancel(); } catch { }
this.speechSynthesisInstance = null;
this.isSpeaking = false;
}
private stopAllAudioAndMute(): void {
this.stopServerAudio();
try { if (window.speechSynthesis.speaking || window.speechSynthesis.paused) window.speechSynthesis.cancel(); } catch { }
this.isVoiceEnabled = false;
this.isMuted = true;
this.messages.forEach(m => { m.isPlaying = false; });
this.isReadingIndex = null;
this.isSpeaking = false;
this.isAudioPaused = false;
this.cdr.detectChanges();
}
clearVideoUrl(): void {
this.videoUrl = '';
if (this.isTutorEnabled) {
this.isTutorEnabled = false;
this.saveToggleStates();
}
this.cdr.detectChanges();
}
private stopAllVideo(): void {
this.videoUrl = '';
// Pause any inline video elements in the DOM (prevents a video from continuing to play)
try {
const vids = Array.from(document.querySelectorAll<HTMLVideoElement>('[id^="inline-video-"]'));
vids.forEach(v => {
try { v.pause(); v.currentTime = 0; } catch { /* noop */ }
});
} catch (err) {
/* noop */
}
this.messages.forEach((m, idx) => {
m.playingVideoUrl = '';
this.isVideoEnabledIndex[idx] = false;
});
this.cdr.detectChanges();
}
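/** Icon for the composite button; roughly mirrors the branch order in handleButtonClick(). */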
getButtonIcon(): string {
if (this.isAiResponding) return 'assets/images/chat/stop.png';
if (this.serverAudio && !this.serverAudio.paused) return 'assets/images/chat/microphone-icon.png';
if (this.userInput.trim().length > 0) return 'assets/images/chat/send-icon.png';
if (this.isSpeaking && !this.serverAudio) return 'assets/images/chat/pause-icon.png';
if (this.isAudioPaused) return 'assets/images/chat/resume-icon.png';
return 'assets/images/chat/microphone-icon.png';
}
goToHomePageShortcut(): void { this.router.navigate(['/home']); }
openUserGuide(): void { this.showUserGuide = true; }
closeUserGuide(): void { this.showUserGuide = false; }
stopSpeaking(): void {
try {
if (window.speechSynthesis?.speaking || window.speechSynthesis?.paused) {
window.speechSynthesis.cancel();
}
} catch { /* noop */ }
this.speechSynthesisInstance = null;
try { this.stopServerAudio(); } catch { /* noop */ }
this.isSpeaking = false;
this.isAudioPaused = false;
if (this.aiResponseInterval) { clearInterval(this.aiResponseInterval); this.aiResponseInterval = null; }
this.isAiResponding = false;
this.cdr.detectChanges();
}
muteMicrophone(): void {
try {
if (this.recognition && typeof this.recognition.stop === 'function') {
this.recognition.stop();
}
} catch (e) {
console.warn('Error stopping recognition:', e);
}
this.isListening = false;
this.isProcessingSpeech = false;
this.errorMessage = '';
this.cdr.detectChanges();
}
openMicrophoneSettings(): void {
const ua = navigator.userAgent || '';
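// NOTE: browsers generally block scripted navigation to internal pages
// (chrome://, edge://), so window.open may fail silently; the alerts below
// are the practical fallback.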
try {
if (ua.includes('Edg') || ua.includes('Edge')) {
window.open('edge://settings/content/microphone', '_blank');
} else if (ua.includes('Chrome') && !ua.includes('Chromium')) {
window.open('chrome://settings/content/microphone', '_blank');
} else if (ua.includes('Firefox')) {
window.open('about:preferences#privacy', '_blank');
} else if (/Safari/.test(ua) && /Macintosh/.test(navigator.platform)) {
alert('Open Safari → Settings (or Preferences) → Websites → Microphone to enable access.');
} else {
alert("Please check your browser's settings to enable the microphone.");
}
} catch (err) {
console.error('openMicrophoneSettings error', err);
alert("Unable to open settings automatically. Please check your browser's microphone/privacy settings.");
}
this.cdr.detectChanges();
}
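/**
* Heuristic: treat short requests like "more examples?" or "another example?"
* (≤ 8 words mentioning example/more) as follow-ups rather than new questions.
*/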
private _isShortFollowup(text: string | undefined): boolean {
if (!text) return false;
const t = text.toLowerCase().replace(/[^\w\s?]/g, "").trim();
if (!t) return false;
if (/\b(example|examples|more examples|another example|give example|more)\b/.test(t)) {
return t.split(/\s+/).length <= 8;
}
return ["more", "more?", "another?", "another example?"].includes(t);
}
// Add this method inside the ChatComponent class (near other audio helpers)
stopReadAloud(): void {
// Stop any server audio and reset UI playback state
try { this.stopServerAudio(); } catch { /* noop */ }
this.isReadingIndex = null;
this.isSpeaking = false;
this.cdr.detectChanges();
}
openMicrophonePopup(): void {
this._recordingFinalBuffer = '';
this._recordingInterimBuffer = '';
this.popupTranscript = '';
this.errorMessage = '';
this.showMicPopup = true;
setTimeout(() => this.startRecording(), 200);
}
closeMicrophonePopup(): void {
this.stopRecording();
this.showMicPopup = false;
this.popupTranscript = '';
this._recordingFinalBuffer = '';
this._recordingInterimBuffer = '';
this.errorMessage = '';
this.cdr.detectChanges();
}
async startRecording(): Promise<void> {
if (!this.recognition) { this.errorMessage = 'Speech recognition not supported.'; return; }
this._recordingFinalBuffer = '';
this._recordingInterimBuffer = '';
this.popupTranscript = '';
this.errorMessage = '';
this.isRecording = true;
try { this.recognition.interimResults = true; } catch { }
try { this.recognition.start(); } catch (e) {
console.warn('Could not start recognition', e);
if (!this._recognitionActive) {
setTimeout(() => { try { this.recognition.start(); } catch { } }, 300);
}
}
try { await this.startAnalyzer(); } catch (err) { console.warn('Analyzer failed to start', err); }
this.cdr.detectChanges();
}
async stopRecording(): Promise<void> {
if (this._restartTimer) { clearTimeout(this._restartTimer); this._restartTimer = null; }
if (this.recognition && this.isRecording) { try { this.recognition.stop(); } catch { } }
try { this.stopAnalyzer(); } catch { /* noop */ }
this.isRecording = false;
const finalText = (this._recordingFinalBuffer || '').trim();
const interimText = (this._recordingInterimBuffer || '').trim();
const combinedRaw = (finalText + ' ' + interimText).trim();
if (!combinedRaw) {
this.popupTranscript = '';
this.cdr.detectChanges();
return;
}
this.popupTranscript = 'Processing…';
this.cdr.detectChanges();
let punctuated = combinedRaw;
try {
console.log('[VOICE] raw transcript:', combinedRaw);
// call backend at /rag/punctuate
punctuated = await this.punctuateText(combinedRaw);
console.log('[VOICE] punctuated result:', punctuated);
} catch (err) {
console.warn('Punctuation API failed', err);
punctuated = combinedRaw;
}
let normalized = this.normalizeTranscript(punctuated);
const hasTerminalPunctuation = /[.?!]$/.test(normalized);
const questionPattern = /^(who|what|when|where|why|how|which|whom|whose|is|are|am|was|were|do|does|did|can|could|would|will|shall|should|have|has|had)\b/i;
if (!hasTerminalPunctuation && questionPattern.test(combinedRaw)) {
normalized = normalized + '?';
}
this.popupTranscript = normalized;
this.cdr.detectChanges();
}
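/**
* Some backends return a raw Python-style repr such as
* ChatCompletionMessage(content='...') instead of plain text;
* pull out the content payload, falling back to the raw string.
*/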
private extractAssistantContent(raw: string): string {
if (!raw) return raw;
try {
// Look for ChatCompletionMessage(content='...') or content="..."
const re1 = /message=ChatCompletionMessage\(\s*content=(['"])((?:\\.|(?!\1).)*)\1/;
const m1 = raw.match(re1);
if (m1 && m1[2]) return m1[2].replace(/\\'/g, "'").replace(/\\"/g, '"').trim();
const re2 = /ChatCompletionMessage\(\s*content=(['"])((?:\\.|(?!\1).)*)\1/;
const m2 = raw.match(re2);
if (m2 && m2[2]) return m2[2].replace(/\\'/g, "'").replace(/\\"/g, '"').trim();
// Fallback: any content='...' anywhere
const re3 = /content=(['"])((?:\\.|(?!\1).)*)\1/;
const m3 = raw.match(re3);
if (m3 && m3[2]) return m3[2].replace(/\\'/g, "'").replace(/\\"/g, '"').trim();
} catch (e) {
console.warn('extractAssistantContent error', e);
}
return raw.trim();
}
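/** Ask the backend punctuation endpoint (via ApiService.punctuate) to punctuate a raw transcript; falls back to the raw text on any failure. */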
private async punctuateText(raw: string): Promise<string> {
if (!raw) return raw;
try {
// Use ApiService so backend URL is centralized
const resp$ = this.apiService.punctuate(raw);
const res = await lastValueFrom(resp$);
if (res && typeof res.punctuated === 'string' && res.punctuated.trim().length) {
const punctuated = res.punctuated.trim();
const extracted = this.extractAssistantContent(punctuated);
return extracted || punctuated;
}
} catch (err) {
console.warn('punctuateText error', err);
}
return raw;
}
confirmAndSendTranscript(): void {
let text = (this.popupTranscript || '').trim();
if (!text) { this.errorMessage = 'No speech captured'; return; }
text = this.normalizeTranscript(text);
// still emit in case other components listen
this.transcriptConfirmed.emit(text);
// Send the extracted/normalized text as a user message to the chat
try {
this.sendMessage(text);
} catch (e) {
console.warn('Failed to send transcript as message', e);
}
// reset popup state
this._recordingFinalBuffer = '';
this._recordingInterimBuffer = '';
this.showMicPopup = false;
this.isRecording = false;
if (this._restartTimer) { clearTimeout(this._restartTimer); this._restartTimer = null; }
this.popupTranscript = '';
this.cdr.detectChanges();
}
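/**
* Map spoken punctuation words to symbols and tidy spacing/capitalisation.
* Illustrative example (assumed input): "what is a noun question mark" -> "What is a noun?"
*/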
private normalizeTranscript(text: string): string {
if (!text) return text;
let t = text.trim();
const mappings: Array<[RegExp, string]> = [
[/\b(full stop|period|dot)\b/gi, '.'],
[/\b(question mark|question)\b/gi, '?'],
[/\b(exclamation mark|exclamation|exclaim)\b/gi, '!'],
[/\b(comma)\b/gi, ','],
[/\b(colon)\b/gi, ':'],
[/\b(semicolon)\b/gi, ';'],
[/\b(ellipsis|dot dot dot|three dots)\b/gi, '...'],
[/\b(new line|newline|new paragraph|line break)\b/gi, '\n'],
[/\b(open parenthesis|open bracket)\b/gi, '('],
[/\b(close parenthesis|close bracket)\b/gi, ')'],
[/\b(double quote|quote|quotation)\b/gi, '"'],
[/\b(single quote|apostrophe)\b/gi, "'"],
[/\b(dash|hyphen)\b/gi, '-'],
[/\b(percent|percent sign)\b/gi, '%'],
[/\b(and sign|ampersand)\b/gi, '&'],
[/\b(at sign)\b/gi, '@'],
[/\b(forward slash|slash)\b/gi, '/'],
[/\b(backslash)\b/gi, '\\']
];
for (const [re, rep] of mappings) t = t.replace(re, rep);
t = t.replace(/\s+([,.:;?!%'\)\]\}])/g, '$1');
t = t.replace(/\s+([\(\[\{"'`])/g, '$1');
t = t.replace(/([.?!:;,%\)\]]{1,3})(?!\s|\n|$)/g, '$1 '); // apostrophes/hyphens excluded so words like "don't" stay intact
t = t.replace(/[ \t]{2,}/g, ' ');
t = t.split('\n').map(line => line.trim()).join('\n');
t = t.replace(/(^|[\n\.!\?]\s+)([a-z])/g, (m, p1, p2) => p1 + p2.toUpperCase());
return t.trim();
}
// WebAudio analyzer helpers: startAnalyzer, stopAnalyzer, drawWaveform
private async startAnalyzer(): Promise<void> {
if (!isPlatformBrowser(this.platformId)) return;
// Ensure canvas is available (rare race when popup just opened)
if (!this.waveformCanvas || !this.waveformCanvas.nativeElement) {
await new Promise(r => setTimeout(r, 80));
if (!this.waveformCanvas || !this.waveformCanvas.nativeElement) {
console.warn('Waveform canvas not available');
return;
}
}
try {
// Ensure we have a running AudioContext
if (!this.audioContext || this.audioContext.state === 'closed') {
this.audioContext = new (window.AudioContext || (window as any).webkitAudioContext)();
} else if (this.audioContext.state === 'suspended') {
// resume if previously suspended
try { await this.audioContext.resume(); } catch { /* noop */ }
}
// Always request a fresh MediaStream for analyzer — it's cheap after permission is granted
if (this.mediaStream) {
try { this.mediaStream.getTracks().forEach(t => t.stop()); } catch { /* noop */ }
this.mediaStream = null;
}
const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
this.mediaStream = stream;
// Create source and analyser
const source = this.audioContext.createMediaStreamSource(stream);
// Safely disconnect previous analyser if any
try { this.analyser?.disconnect(); } catch { /* noop */ }
const analyser = this.audioContext.createAnalyser();
analyser.fftSize = 2048;
analyser.smoothingTimeConstant = 0.85;
source.connect(analyser);
// assign to instance field
this.analyser = analyser;
const bufferLength = analyser.fftSize;
this.dataArray = new Uint8Array(bufferLength);
// Start drawing loop (will cancel any previous RAF)
this.drawWaveform();
} catch (err) {
console.warn('startAnalyzer error', err);
// clean partial state on failure
try { if (this.mediaStream) { this.mediaStream.getTracks().forEach(t => t.stop()); this.mediaStream = null; } } catch { /* noop */ }
try { if (this.analyser) { this.analyser.disconnect(); this.analyser = null; } } catch { /* noop */ }
this.dataArray = null;
// do not swallow error — caller can decide how to handle
throw err;
}
}
private stopAnalyzer(): void {
try {
if (this.animationFrameId) { cancelAnimationFrame(this.animationFrameId); this.animationFrameId = null; }
if (this.analyser) {
try { this.analyser.disconnect(); } catch { /* noop */ }
this.analyser = null;
}
if (this.mediaStream) {
try { this.mediaStream.getTracks().forEach(t => t.stop()); } catch { /* noop */ }
this.mediaStream = null;
}
// Close the AudioContext to fully reset state (async). Do not await here to avoid blocking UI.
try {
if (this.audioContext && typeof this.audioContext.close === 'function') {
this.audioContext.close().catch(() => { /* noop */ }).finally(() => { this.audioContext = null; });
} else {
this.audioContext = null;
}
} catch { this.audioContext = null; }
this.dataArray = null;
// Clear canvas
if (this.waveformCanvas && this.waveformCanvas.nativeElement) {
const c = this.waveformCanvas.nativeElement;
const ctx = c.getContext('2d');
if (ctx) ctx.clearRect(0, 0, c.width, c.height);
}
} catch (err) {
console.warn('stopAnalyzer error', err);
}
}
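/**
* Render the time-domain waveform plus a level bar. The level is the
* normalized RMS of the 8-bit samples: sqrt(mean((s - 128)^2)) / 128, clamped to [0, 1].
*/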
private drawWaveform(): void {
if (!this.waveformCanvas || !this.waveformCanvas.nativeElement || !this.analyser || !this.dataArray) return;
const canvas = this.waveformCanvas.nativeElement;
const ctx = canvas.getContext('2d');
if (!ctx) return;
const dpr = window.devicePixelRatio || 1;
const resize = () => {
const rect = canvas.getBoundingClientRect();
const w = Math.max(1, Math.floor(rect.width * dpr));
const h = Math.max(1, Math.floor(rect.height * dpr));
if (canvas.width !== w || canvas.height !== h) { canvas.width = w; canvas.height = h; }
};
const render = () => {
// If popup was closed or analyzer removed, stop rendering
if (!this.waveformCanvas || !this.waveformCanvas.nativeElement || !this.analyser || !this.dataArray) {
if (this.animationFrameId) { cancelAnimationFrame(this.animationFrameId); this.animationFrameId = null; }
return;
}
// If canvas is not visible (e.g. popup hidden), stop loop to avoid wasted CPU and potential silent failures
// offsetParent is null for display:none, also check bounding rect sanity
const rect = canvas.getBoundingClientRect();
if (rect.width === 0 || rect.height === 0 || !canvas.offsetParent) {
if (this.animationFrameId) { cancelAnimationFrame(this.animationFrameId); this.animationFrameId = null; }
return;
}
resize();
try {
this.analyser.getByteTimeDomainData(this.dataArray);
} catch (e) {
// analyser may have been disconnected / audioContext closed mid-frame
console.warn('analyser.getByteTimeDomainData failed', e);
if (this.animationFrameId) { cancelAnimationFrame(this.animationFrameId); this.animationFrameId = null; }
return;
}
let sum = 0;
for (let i = 0; i < this.dataArray.length; i++) {
const v = this.dataArray[i] - 128;
sum += v * v;
}
const rms = Math.sqrt(sum / this.dataArray.length) / 128;
const level = Math.min(1, Math.max(0, rms));
ctx.clearRect(0, 0, canvas.width, canvas.height);
const baselineY = canvas.height / 2;
// faint baseline
ctx.save();
ctx.globalAlpha = 0.25;
ctx.strokeStyle = '#666';
ctx.lineWidth = Math.max(1, dpr);
ctx.setLineDash([2 * dpr, 3 * dpr]);
ctx.beginPath();
ctx.moveTo(0, baselineY);
ctx.lineTo(canvas.width, baselineY);
ctx.stroke();
ctx.setLineDash([]);
ctx.restore();
// waveform
ctx.lineWidth = Math.max(1, dpr);
ctx.strokeStyle = 'rgba(37,168,90,0.95)';
ctx.beginPath();
const slice = canvas.width / this.dataArray.length;
let x = 0;
for (let i = 0; i < this.dataArray.length; i++) {
const v = this.dataArray[i] / 128.0;
const y = (v * canvas.height) / 2;
const drawY = baselineY - (y - canvas.height / 2) * 0.6;
if (i === 0) ctx.moveTo(x, drawY); else ctx.lineTo(x, drawY);
x += slice;
}
ctx.stroke();
// highlight center bar that responds to level
const highlightMaxW = canvas.width * 0.7;
const highlightW = Math.max(2 * dpr, highlightMaxW * (0.05 + level * 0.95));
const hh = 6 * dpr;
const hx = (canvas.width - highlightW) / 2;
const hy = baselineY - hh / 2;
ctx.save();
ctx.globalAlpha = 0.18 + level * 0.3;
ctx.fillStyle = '#25a85a';
ctx.fillRect(hx - 6 * dpr, hy - 6 * dpr, highlightW + 12 * dpr, hh + 12 * dpr);
ctx.restore();
ctx.fillStyle = '#25a85a';
ctx.globalAlpha = 1;
ctx.fillRect(hx, hy, highlightW, hh);
this.animationFrameId = requestAnimationFrame(render);
};
// Start the loop (ensure any previous RAF is cancelled first)
if (this.animationFrameId) cancelAnimationFrame(this.animationFrameId);
this.animationFrameId = requestAnimationFrame(render);
}
}
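// Usage sketch (illustrative, host component assumed):
// <app-chat (transcriptConfirmed)="onTranscript($event)"></app-chat>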