// File size: 2,653 Bytes
// commit bd03a8e
// Resolve the environment's SpeechRecognition constructor across vendor
// prefixes. `undefined` when running server-side (no `window`) or when the
// browser has no implementation — callers must check before constructing.
// NOTE: type widened to `| undefined` so the SSR branch is honestly typed.
const SpeechRecognitionPolyfill: typeof webkitSpeechRecognition | undefined = typeof window !== 'undefined' ? (
// @ts-ignore — vendor-prefixed constructors are not in lib.dom typings
window.SpeechRecognition ||
window.webkitSpeechRecognition ||
// @ts-ignore
window.mozSpeechRecognition ||
// @ts-ignore
window.msSpeechRecognition ||
// @ts-ignore
window.oSpeechRecognition
) as typeof webkitSpeechRecognition : undefined
/** Callback for recognition results: `msg` carries recognized text (empty when a command matched); `command` is set to the matched command string. */
type subscriber = (msg: string, command?: string) => void
/**
 * Thin wrapper over the Web Speech API's SpeechRecognition:
 * continuous listening with auto-restart, pause/resume of transcript
 * delivery, and matching of configured voice commands.
 *
 * When no SpeechRecognition implementation exists, `recognition` stays
 * undefined and every method is a no-op.
 */
export class SR {
  recognition?: SpeechRecognition
  /** Subscriber notified with recognized text and/or a matched command. */
  onchange?: subscriber
  /** When true, final transcripts are forwarded to `onchange`. */
  transcript: boolean = false
  /** Intent to listen; drives the auto-restart in `onend`. */
  listening: boolean = false
  /** Anchored pattern matching any configured command (optional trailing '。'). */
  private commandsRe?: RegExp

  /**
   * @param commands literal phrases that trigger `onchange('', command)`
   *                 instead of a transcript when spoken as a full utterance.
   */
  constructor(commands: string[]) {
    this.recognition = SpeechRecognitionPolyfill ? new SpeechRecognitionPolyfill() : undefined
    if (!this.recognition) {
      return
    }
    this.configuration('zh-CN')
    if (commands.length) {
      // Escape regex metacharacters so literal commands cannot corrupt the
      // pattern; '。?' tolerates the recognizer's sentence-final period.
      const escaped = commands.map((c) => c.replace(/[.*+?^${}()|[\]\\]/g, '\\$&'))
      this.commandsRe = new RegExp(`^(${escaped.join('|')})。?$`)
    }
    this.recognition.onresult = this.speechRecognition
    this.recognition.onerror = (err) => {
      // Error path: log on stderr and fully stop; `stop()` clears `listening`
      // so the `onend` auto-restart below does not fire.
      console.error('err', err.error)
      this.stop()
    }
    this.recognition.onend = () => {
      // The engine ends sessions on its own; restart while we still intend to listen.
      if (this.recognition && this.listening) {
        this.recognition.start()
      }
    }
  }

  /**
   * `onresult` handler: for each final result, emit a matched command via
   * `onchange('', command)`, or the raw transcript when `transcript` is on.
   */
  speechRecognition = (event: SpeechRecognitionEvent) => {
    if (!this.listening) return
    for (let i = event.resultIndex; i < event.results.length; i++) {
      const result = event.results[i]
      if (!result.isFinal) continue
      const text = result[0].transcript.trim()
      // Use the exec() match object instead of the deprecated RegExp.$1 static.
      const match = this.commandsRe?.exec(text)
      if (match) {
        return this.onchange?.('', match[1])
      }
      if (!this.transcript) return
      this.onchange?.(text)
    }
  }

  /**
   * Configure continuous recognition and language.
   * Returned promise resolves only when the engine fires `onstart`
   * (never resolves when `recognition` is undefined); the constructor
   * deliberately does not await it.
   */
  private configuration = async (lang: string = 'zh-CN') => {
    return new Promise((resolve) => {
      if (this.recognition) {
        this.recognition.continuous = true
        this.recognition.lang = lang
        this.recognition.onstart = resolve
      }
    })
  }

  /** Begin recognition and enable transcript delivery. No-op if already listening. */
  start = async () => {
    if (this.recognition && !this.listening) {
      await this.recognition.start()
      this.transcript = true
      this.listening = true
    }
  }

  /** Stop recognition; disables transcript delivery and the auto-restart. */
  stop = () => {
    if (this.recognition) {
      this.recognition.stop()
      this.transcript = false
      this.listening = false
    }
  }

  /** Keep listening but suppress transcript delivery (commands still fire). */
  pause = () => {
    if (this.recognition) {
      this.transcript = false
    }
  }

  /** Resume transcript delivery after `pause()`. */
  resume = () => {
    if (this.recognition) {
      this.transcript = true
    }
  }

  /** Abort immediately, discarding pending results (only while transcribing). */
  abort = () => {
    if (this.recognition && this.transcript) {
      this.recognition.abort()
      this.transcript = false
      this.listening = false
    }
  }
}
|