UI Overhaul and Provider Switch to SambaNova

#2
Files changed (6)
  1. app.py +114 -2
  2. index.html +249 -64
  3. requirements.txt +1 -3
  4. script1.js +527 -1
  5. script2.js +582 -1
  6. secure.js +0 -1
app.py CHANGED
@@ -1,4 +1,116 @@
+ from fastapi import FastAPI, HTTPException, Request
+ from fastapi.responses import StreamingResponse
+ from fastapi.middleware.cors import CORSMiddleware
+ import aiohttp
+ import json
+ import time
+ import random
+ import ast
+ import urllib.parse
+ from apscheduler.schedulers.background import BackgroundScheduler
  import os
- from groq import Groq
+ from pydantic import BaseModel
 
- exec(os.environ.get('MainData'))
+ SAMBA_NOVA_API_KEY = os.environ.get("SAMBA_NOVA_API_KEY", None)
+
+ app = FastAPI()
+
+ # Time-limited in-memory response cache
+ cache = {}
+ CACHE_DURATION = 120  # seconds
+
+ # Remove expired cache entries
+ def cleanup_cache():
+     current_time = time.time()
+     for key, (value, timestamp) in list(cache.items()):
+         if current_time - timestamp > CACHE_DURATION:
+             del cache[key]
+
+ # Initialize and start the scheduler
+ scheduler = BackgroundScheduler()
+ scheduler.add_job(cleanup_cache, 'interval', seconds=60)  # Run cleanup every 60 seconds
+ scheduler.start()
+
+ class StreamTextRequest(BaseModel):
+     query: str
+     history: str = "[]"
+     model: str = "llama3-8b"
+     api_key: str = None
+
+ @app.post("/stream_text")
+ async def stream_text(request: StreamTextRequest):
+     current_time = time.time()
+     cache_key = (request.query, request.history, request.model)
+
+     # Serve from the cache if the entry exists and has not expired
+     if cache_key in cache:
+         cached_response, timestamp = cache[cache_key]
+         return StreamingResponse(iter([f"{cached_response}"]), media_type='text/event-stream')
+
+     # Model selection logic
+     if "405" in request.model:
+         fmodel = "Meta-Llama-3.1-405B-Instruct"
+     elif "70" in request.model:
+         fmodel = "Meta-Llama-3.1-70B-Instruct"
+     else:
+         fmodel = "Meta-Llama-3.1-8B-Instruct"
+
+     system_message = """You are Voicee, a friendly and intelligent voice assistant created by KingNish. Your primary goal is to provide accurate, concise, and engaging responses while maintaining a positive and upbeat tone. Always aim to provide clear and relevant information that directly addresses the user's query, but feel free to sprinkle in a dash of humor—after all, laughter is the best app! Keep your responses brief and to the point, avoiding unnecessary details or tangents, unless they’re hilariously relevant. Use a friendly and approachable tone to create a pleasant interaction, and don’t shy away from a cheeky pun or two! Tailor your responses based on the user's input and previous interactions, ensuring a personalized experience that feels like chatting with a witty friend. Invite users to ask follow-up questions or clarify their needs, fostering a conversational flow that’s as smooth as butter on a hot pancake. Aim to put a smile on the user's face with light-hearted and fun responses, and be proactive in offering additional help or suggestions related to the user's query. Remember, your goal is to be the go-to assistant for users, making their experience enjoyable and informative—like a delightful dessert after a hearty meal!"""
+
+     messages = [{'role': 'system', 'content': system_message}]
+
+     messages.extend(ast.literal_eval(request.history))
+
+     messages.append({'role': 'user', 'content': request.query})
+
+     data = {'messages': messages, 'stream': True, 'model': fmodel}
+
+     api_key = request.api_key or SAMBA_NOVA_API_KEY
+
+     async def stream_response():
+         async with aiohttp.ClientSession() as session:
+             async with session.post('https://api.sambanova.ai/v1/chat/completions', headers={'Authorization': f'Bearer {api_key}', 'Content-Type': 'application/json'}, json=data) as response:
+                 if response.status != 200:
+                     raise HTTPException(status_code=response.status, detail="Error fetching AI response")
+
+                 response_content = ""
+                 async for line in response.content:
+                     line = line.decode('utf-8').strip()
+                     if line.startswith('data: {'):
+                         json_data = line[6:]
+                         try:
+                             parsed_data = json.loads(json_data)
+                             content = parsed_data.get("choices", [{}])[0].get("delta", {}).get("content", '')
+                             if content:
+                                 content = content.replace("\n", " ")
+                                 response_content += f"data: {content}\n\n"
+                                 yield f"data: {content}\n\n"
+                         except json.JSONDecodeError as e:
+                             print(f"Error decoding JSON: {e}")
+                             yield f"data: Error decoding JSON\n\n"
+
+                 # Cache the full response
+                 cache[cache_key] = (response_content, current_time)
+
+     return StreamingResponse(stream_response(), media_type='text/event-stream')
+
+
+
+ # Serve index.html and the client scripts from the same directory as app.py
+ from starlette.responses import FileResponse
+
+ @app.get("/script1.js")
+ async def script1_js():
+     return FileResponse("script1.js")
+
+ @app.get("/script2.js")
+ async def script2_js():
+     return FileResponse("script2.js")
+
+ @app.get("/")
+ async def read_index():
+     return FileResponse('index.html')
+
+ if __name__ == "__main__":
+     import uvicorn
+     uvicorn.run(app, host="0.0.0.0", port=7068)
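
For a quick check of the new endpoint outside the browser UI, a minimal client sketch follows. It assumes the server is running locally on port 7068 and reuses the aiohttp dependency the backend already requires; the script is illustrative and not part of this PR.

import asyncio
import aiohttp

async def main():
    # Hypothetical smoke test for the /stream_text endpoint added above.
    payload = {"query": "Hello!", "history": "[]", "model": "llama3-8b"}
    async with aiohttp.ClientSession() as session:
        async with session.post("http://localhost:7068/stream_text", json=payload) as resp:
            assert resp.status == 200
            # The endpoint emits SSE-style lines of the form "data: <text>".
            async for raw_line in resp.content:
                line = raw_line.decode("utf-8").strip()
                if line.startswith("data: "):
                    print(line[6:])

asyncio.run(main())
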
index.html CHANGED
@@ -1,66 +1,251 @@
  <!DOCTYPE html>
- <html lang=en>
- <head>
- <meta charset=UTF-8>
- <meta name=viewport content="width=device-width, initial-scale=1.0">
- <title>Voice Assistant</title>
- <link href=https://cdn.jsdelivr.net/npm/tailwindcss@2.2.19/dist/tailwind.min.css rel=stylesheet>
- <style>.transition-all{transition:all .3s ease-in-out}body.dark{background-color:#1a202c}body.dark .bg-white{background-color:#2d3748}body.dark .text-gray-900{color:#e2e8f0}body.dark .text-gray-700{color:#a0aec0}body.dark .bg-gray-100{background-color:#4a5568}body.dark .bg-gray-200{background-color:#718096}body.dark .bg-gradient-to-r{background-size:200% auto;background-position:left center}#darkmode-toggle span{transition:transform .3s ease-in-out}.hover:bg-gradient-to-r{background-size:200% auto;transition:background-position .5s ease-in-out}.hover:bg-gradient-to-r:hover{background-position:right center}svg{stroke:currentColor;stroke-width:2;stroke-linecap:round;stroke-linejoin:round}</style>
- <script>function loadScript(){var d=/Chrome/.test(navigator.userAgent)&&/Google Inc/.test(navigator.vendor)&&!/Edg/.test(navigator.userAgent);var b=window.innerWidth>768;var e=document.querySelector('script[src="script1.js"], script[src="script2.js"]');if(e){e.remove()}var a=document.createElement("script");var c=new Date().getTime();if(d&&b){a.src="script1.js?t="+c}else{a.src="script2.js?t="+c}a.onerror=function(){console.error("Error loading script:",a.src)};document.head.appendChild(a)}document.addEventListener("DOMContentLoaded",loadScript);document.addEventListener("DOMContentLoaded",loadScript);</script>
- </head>
- <body class="bg-gray-100 dark:bg-gray-900 transition-colors duration-300">
- <div class="container mx-auto px-4 py-16">
- <div class="bg-white dark:bg-gray-800 shadow-lg rounded-lg px-8 py-6 transition-all">
- <div class="flex items-center justify-between mb-6">
- <h1 class="text-3xl font-bold text-gray-900 dark:text-white">
- <span class="bg-gradient-to-r from-blue-500 to-purple-500 bg-clip-text text-transparent">Voice</span> Assistant
- </h1>
- <button id=darkmode-toggle class="relative w-12 h-6 rounded-full bg-gray-300 dark:bg-gray-700 transition-colors duration-300 focus:outline-none">
- <span class="absolute left-1 top-1 w-4 h-4 rounded-full bg-white dark:bg-gray-900 transform transition-transform duration-300"></span>
- </button>
- </div>
- <div id=responseTime class="text-sm text-gray-500 dark:text-gray-400 mb-4">Latency: 0ms</div>
- <div class="flex items-center justify-around mb-8">
- <div id=userIndicator class="rounded-full px-4 py-2 text-white flex items-center transition-colors duration-300 bg-gradient-to-r from-green-400 to-blue-500 hover:bg-gradient-to-r" data-content="User: Idle">
- <svg class="w-4 h-4 mr-2" viewBox="0 0 24 24"><path d="M12 14a7 7 0 017 7H5a7 7 0 017-7zm0-8a4 4 0 11-8 0 4 4 0 018 0z"></path></svg>
- <span>Idle</span>
- </div>
- <div id=aiIndicator class="rounded-full px-4 py-2 text-white flex items-center transition-colors duration-300 bg-gradient-to-r from-red-400 to-yellow-500 hover:bg-gradient-to-r" data-content="AI: Listening">
- <svg class="w-4 h-4 mr-2" viewBox="0 0 24 24"><path d="M12 15v2m-6 4h12a2 2 0 002-2v-6a2 2 0 00-2-2H6a2 2 0 00-2 2v6a2 2 0 002 2zm10-10V7a4 4 0 00-8 0v4h8z"></path></svg>
- <span>Listening</span>
- </div>
- </div>
- <div class="flex items-center justify-center mb-8">
- <button style=text-align:-webkit-center id=startStopButton class="bg-gradient-to-r from-blue-500 to-purple-600 hover:from-purple-600 hover:to-blue-500 text-white font-bold py-2 px-4 rounded transition-all focus:outline-none focus:shadow-outline">
- <svg id=microphoneIcon class="w-6 h-6 inline-block mr-2" viewBox="0 0 24 24"><path d="M12 14v2a4 4 0 004 4h0a4 4 0 004-4v-2M16 7a4 4 0 10-8 0v4h8z"></path></svg>
- <span>Start Listening</span>
- </button>
- </div>
- <div class=mt-8>
- <div class="grid grid-cols-1 md:grid-cols-2 gap-4">
- <div>
- <label for=voiceSelect class="block text-gray-700 dark:text-gray-300 font-bold mb-2">Voice:</label>
- <select id=voiceSelect class="w-full bg-gray-200 dark:bg-gray-700 border border-gray-300 dark:border-gray-600 text-gray-700 dark:text-gray-300 py-2 px-3 rounded focus:outline-none focus:shadow-outline">
- <option value=Amy>Female</option>
- <option value=Brian>Male</option>
- </select>
- </div>
- <div>
- <label for=modelSelect class="block text-gray-700 dark:text-gray-300 font-bold mb-2">Model:</label>
- <select id=modelSelect class="w-full bg-gray-200 dark:bg-gray-700 border border-gray-300 dark:border-gray-600 text-gray-700 dark:text-gray-300 py-2 px-3 rounded focus:outline-none focus:shadow-outline">
- <option value=8b>Fastest</option>
- <option value=70b>Powerful</option>
- </select>
- </div>
- <div class="flex items-center" style=visibility:hidden>
- <label for=noiseSuppression class="block text-gray-700 dark:text-gray-300 font-bold mb-2">Noise Suppression:</label>
- <input type=checkbox id=noiseSuppression checked class="ml-2 form-checkbox h-5 w-5 text-blue-500">
- </div>
- </div>
- </div>
- <div id=transcript class="mt-8 p-4 bg-gray-200 dark:bg-gray-700 rounded-lg text-sm font-mono whitespace-pre-wrap transition-all"></div>
- </div>
- </div>
- <script>document.addEventListener("DOMContentLoaded",function(){const a=document.getElementById("darkmode-toggle");function d(e){if(e==="dark"){document.body.classList.add("dark");a.querySelector("span").style.transform="translateX(24px)"}else{document.body.classList.remove("dark");a.querySelector("span").style.transform="translateX(0)"}localStorage.setItem("theme",e)}function b(){const e=document.body.classList.contains("dark");d(e?"light":"dark")}const c=localStorage.getItem("theme");d(c||"light");a.addEventListener("click",b)});</script>
- </body>
+ <html lang="en">
+ <head>
+   <meta charset="UTF-8">
+   <meta name="viewport" content="width=device-width, initial-scale=1.0">
+   <title>Voice Assistant</title>
+   <link href="https://fonts.googleapis.com/css2?family=Roboto:wght@300;400;700&display=swap" rel="stylesheet">
+   <style>
+     :root {
+       --primary-color: #4a90e2;
+       --secondary-color: #f39c12;
+       --background-color: #f0f4f8;
+       --card-bg-color: #ffffff;
+       --text-color: #333333;
+       --border-color: #e0e0e0;
+     }
+
+     body {
+       font-family: 'Roboto', sans-serif;
+       background-color: var(--background-color);
+       color: var(--text-color);
+       margin: 0;
+       padding: 0;
+       display: flex;
+       justify-content: center;
+       align-items: center;
+       min-height: 100vh;
+     }
+
+     .container {
+       width: 90%;
+       max-width: 800px;
+     }
+
+     .voice-assistant-card {
+       background-color: var(--card-bg-color);
+       border-radius: 20px;
+       box-shadow: 0 10px 30px rgba(0, 0, 0, 0.1);
+       padding: 40px;
+       text-align: center;
+     }
+
+     .title {
+       font-size: 2.5rem;
+       font-weight: 700;
+       margin-bottom: 20px;
+       color: var(--primary-color);
+     }
+
+     #responseTime {
+       font-size: 0.9rem;
+       color: #777;
+       margin-bottom: 20px;
+     }
+
+     .indicator-wrapper {
+       display: flex;
+       justify-content: space-around;
+       margin-bottom: 30px;
+     }
+
+     .indicator {
+       display: flex;
+       align-items: center;
+       padding: 10px 20px;
+       border-radius: 50px;
+       font-size: 1rem;
+       color: #fff;
+       transition: all 0.3s ease;
+     }
+
+     .indicator svg {
+       margin-right: 8px;
+     }
+
+     #userIndicator {
+       background-color: var(--primary-color);
+     }
+
+     #aiIndicator {
+       background-color: var(--secondary-color);
+     }
+
+     #startStopButton {
+       background-color: #38cb96;
+       color: #fff;
+       border: none;
+       padding: 15px 30px;
+       font-size: 1.2rem;
+       border-radius: 50px;
+       cursor: pointer;
+       transition: all 0.3s ease;
+       display: flex;
+       align-items: center;
+       justify-content: center;
+       margin: 0 auto 30px;
+     }
+
+     #startStopButton:hover {
+       background-color: #1e9b6e;
+       transform: translateY(-2px);
+       box-shadow: 0 5px 15px rgba(74, 144, 226, 0.3);
+     }
+
+     #startStopButton svg {
+       margin-right: 10px;
+     }
+
+     .settings {
+       display: grid;
+       grid-template-columns: 1fr 1fr 1.5fr;
+       gap: 20px;
+       margin-bottom: 30px;
+     }
+
+     .setting {
+       text-align: left;
+       position: relative; /* Added for tooltip positioning */
+     }
+
+     .setting label {
+       display: block;
+       margin-bottom: 5px;
+       font-weight: 700;
+       color: var(--text-color);
+     }
+
+     select,
+     input[type="password"] {
+       width: 100%;
+       padding: 10px;
+       border: 1px solid var(--border-color);
+       border-radius: 5px;
+       font-size: 1rem;
+       background-color: #fff;
+       color: var(--text-color);
+     }
+
+     .tooltip {
+       display: none;
+       position: absolute;
+       background-color: #333;
+       color: #fff;
+       padding: 5px;
+       border-radius: 5px;
+       font-size: 0.8rem;
+     }
+
+     .setting:hover .tooltip {
+       display: block; /* Show tooltip on hover */
+     }
+
+     #transcript {
+       background-color: #f9f9f9;
+       border-radius: 10px;
+       padding: 20px;
+       margin-top: 30px;
+       text-align: left;
+       font-family: 'Courier New', monospace;
+       white-space: pre-wrap;
+       max-height: 200px;
+       overflow-y: auto;
+     }
+
+     @media (max-width: 600px) {
+       .settings {
+         grid-template-columns: 1fr;
+       }
+     }
+   </style>
+ </head>
+ <body>
+   <div class="container">
+     <div class="voice-assistant-card">
+       <h1 class="title">Voice Assistant</h1>
+       <div id="responseTime">Latency: 0ms</div>
+       <div class="indicator-wrapper">
+         <div id="userIndicator" class="indicator">
+           <svg width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round">
+             <path d="M20 21v-2a4 4 0 0 0-4-4H8a4 4 0 0 0-4 4v2"></path>
+             <circle cx="12" cy="7" r="4"></circle>
+           </svg>
+           <span>User: Idle</span>
+         </div>
+         <div id="aiIndicator" class="indicator">
+           <svg width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round">
+             <polygon points="12 2 2 7 12 12 22 7 12 2"></polygon>
+             <polyline points="2 17 12 22 22 17"></polyline>
+             <polyline points="2 12 12 17 22 12"></polyline>
+           </svg>
+           <span>AI: Idle</span>
+         </div>
+       </div>
+       <button id="startStopButton">
+         <svg width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round">
+           <path d="M12 1a3 3 0 0 0-3 3v8a3 3 0 0 0 6 0V4a3 3 0 0 0-3-3z"></path>
+           <path d="M19 10v2a7 7 0 0 1-14 0v-2"></path>
+           <line x1="12" y1="19" x2="12" y2="23"></line>
+           <line x1="8" y1="23" x2="16" y2="23"></line>
+         </svg> Start Listening </button>
+       <div class="settings">
+         <div class="setting">
+           <label for="voiceSelect">Voice:</label>
+           <select id="voiceSelect">
+             <option value="Amy">Female</option>
+             <option value="Brian">Male</option>
+           </select>
+           <span class="tooltip">Select the voice type for the assistant.</span>
+         </div>
+         <div class="setting">
+           <label for="modelSelect">Model:</label>
+           <select id="modelSelect">
+             <option value="8b">Fastest</option>
+             <option value="70b">Powerful</option>
+           </select>
+           <span class="tooltip">Choose the model based on speed or power.</span>
+         </div>
+         <div class="setting">
+           <label for="apiKey">SambaNova API Key (optional):</label>
+           <input type="password" id="apiKey" placeholder="Enter your API Key">
+           <span class="tooltip">Use a SambaNova API key for enhanced speed. You can obtain a free key from <a href="https://cloud.sambanova.ai/apis" style="color: #00f9f0;">https://cloud.sambanova.ai/apis</a>.</span>
+         </div>
+       </div>
+       <div id="transcript"></div>
+     </div>
+   </div>
+   <script>
+     function loadScript() {
+       var isChrome = /Chrome/.test(navigator.userAgent) && /Google Inc/.test(navigator.vendor) && !/Edg/.test(navigator.userAgent);
+       var isDesktop = window.innerWidth > 768;
+       var existingScript = document.querySelector('script[src="script1.js"], script[src="script2.js"]');
+       if (existingScript) {
+         existingScript.remove();
+       }
+       var script = document.createElement('script');
+       if (isChrome && isDesktop) {
+         script.src = 'script1.js';
+       } else {
+         script.src = 'script2.js';
+       }
+       script.onerror = function() {
+         console.error('Error loading script:', script.src);
+       };
+       document.head.appendChild(script);
+     }
+     document.addEventListener('DOMContentLoaded', loadScript);
+   </script>
+ </body>
  </html>
requirements.txt CHANGED
@@ -1,8 +1,6 @@
- groq
  fastapi
  starlette
  python-multipart
  uvicorn
  aiohttp
- apscheduler
- scikit-learn
+ apscheduler
script1.js CHANGED
@@ -1 +1,527 @@
- eval(function(p,a,c,k,e,r){e=function(c){return(c<a?'':e(parseInt(c/a)))+((c=c%a)>35?String.fromCharCode(c+29):c.toString(36))};if(!''.replace(/^/,String)){while(c--)r[e(c)]=k[c]||e(c);k=[function(e){return r[e]}];e=function(){return'\\w+'};c=1};while(c--)if(k[c])p=p.replace(new RegExp('\\b'+e(c)+'\\b','g'),k[c]);return p}('9 1h=W.X(\'1h\');9 2A=W.X(\'4r\');9 1i=W.X(\'4s\');9 4t=W.X(\'4u\');9 2B=W.X(\'4v\');9 2C=W.X(\'4w\');9 R=W.X(\'4x\');9 3r=W.X(\'1b\');b E;b 26=F;b 27=0;b Y=\'\';b Z=G;b M=G;b 1j=G;b 1c=F;b 4y=\'\';b 28=F;b 29="";b 1k=F;9 3s=1d;9 3t="4z://4A.4B.4C/4D/4E/4F";9 3u=S;b u=F;b 11=[];b 1S=[];9 1e=o 2D();9 1f=o 2D();9 3v=10;9 3w=4G;b v=[];9 1T=o 2D();9 3x=4H;9 1U=h=>h.1l().4I().2E(/[^\\w\\s]/g,\'\');9 2F=(13,l,2a,3y)=>`${13}-${l}-${2G.2H(2a)}-${3y}`;9 3A=(h,l)=>{9 13=1U(h);9 e=2F(13,l,v,1i.H);a(1f.2b(e)||1e.2b(e))1g;1S.1m({h:h.1l(),l,e});2I()};9 2I=1n()=>{2J(1S.m>0&&1f.4J<3v){9{h,l,e}=1S.3B();9 1o=o 3C();1f.2K(e,1o);1V{9 t=p 2L(`/3D?h=${2M(h)}&2a=${2G.2H(v)}&3E=${1i.H}`,{2N:\'2O\',3F:{\'3G\':\'z/T-2c\',\'1W-3H\':\'3I/3J\'},1p:1o.1p});a(!t.1q)1X o 15(\'2P t 2Q 2d 1q\');9 2R=p 2S(t.3K,l,1o.1p);a(2R)1e.2K(e,{I:2R,2e:N.O()})}1Y(f){a(f.3L!==\'2f\')J.f("15 4K U:",f)}2g{1f.2h(e);2I()}}};9 2S=1n(2i,l,2j)=>{9 1r=2i.3M();9 2k=o 3N("3O-8");b 17="";1V{2J(K){9{2l,H}=p 1r.3P();a(2l)2T;a(2j.2m)1X o 3Q(\'3R 2m\',\'2f\');9 2n=2k.3S(H,{2c:K});17+=2n;9 P=17.3T(\'\\n\');2o(b i=0;i<P.m-1;i++){9 j=P[i];a(j.2U(\'3U: \')){9 k=j.3V(6).1l();a(k){9 x=p 1Z(k,l);1g x}}}17=P[P.m-1]}}1Y(f){J.f("15 2p 2S:",f)}2g{1r.3W()}1g F};9 1s=1n()=>{a(11.m>0){9 3X=11.3B();9 U=o 4L(3X.I);Q();9 3Y=o 3Z(20=>{U.4M=20;U.40=20});a(u){u.41();u.42=0}u=U;p U.4N();p 3Y;1s()}L{Q()}};9 1Z=1n(z,l)=>{9 43=1U(z);9 e=`${43}-${l}`;a(1T.2b(e)){9 1t=1T.44(e);a(N.O()-1t.2e<3x){1g 1t.I}L{1T.2h(e)}}1V{9 t=p 2L(`${3t}?l=${l}&z=${2M(z)}`,{2N:\'2O\'});a(!t.1q)1X o 15(\'2P t 2Q 2d 1q\');9 45=p t.4O();9 x=4P.4Q(45);1T.2K(e,{I:x,2e:N.O()});1g x}1Y(f){J.f("15 4R 4S U:",f);1g F}};9 2V=1n(h)=>{J.21("4T h c 1u:",h);Z=K;Q();27=N.O();1k=F;9 13=1U(h);9 e=2F(13,1i.H,v,1i.H);a(1e.2b(e)){9 1t=1e.44(e);a(N.O()-1t.2e<3w){9 46=1t.I;11.1m({I:46,2q:K});1s()}L{1e.2h(e)}}1c=o 3C();9 I=`/3D?h=${2M(h)}&3E=${1i.H}&2a=${2G.2H(v)}`;1V{9 t=p 2L(I,{2N:\'2O\',3F:{\'3G\':\'z/T-2c\',\'1W-3H\':\'3I/3J\'},1p:1c.1p});a(!t.1q){a(t.47===4U){J.21("4V 4W 4X, 4Y 2p 1 4Z...");p o 3Z(20=>50(20,51));p 2V(h);1g}1X o 15(`2P t 2Q 2d 1q:${t.47}`)}J.21("52 U t 53");p 2W(t.3K,2A.H,1c.1p)}1Y(f){a(f.3L!==\'2f\'){J.f("15 54 h c 1u:",f)}}2g{Z=G;Q()}};9 2W=1n(2i,l,2j)=>{9 1r=2i.3M();9 2k=o 3N("3O-8");b 17="";b 2X=0;b 1v="";b 1w="";b 22="";b 2r="";1V{2J(K){9{2l,H}=p 1r.3P();a(2l)2T;a(2j.2m)1X o 3Q(\'3R 2m\',\'2f\');a(M){2s(\'48 55 56\');2T}9 2n=2k.3S(H,{2c:K});17+=2n;9 P=17.3T(\'\\n\');2o(b i=0;i<P.m-1;i++){9 j=P[i];a(j.2U(\'3U: \')){9 k=j.3V(6).1l();a(k){a(!1k)1k=N.O();1v+=k+" ";1w+=k+" ";22+=k+" ";3r.k=1w;a(2X<2){9 x=p 1Z(k,l);a(x){11.1m({I:x,2q:G});a(!u)1s()}2r+=k+" ";2X++}L{b 1x=22.2E(2r,\'\').1l();a(1x.m>=3u){9 x=p 1Z(1x,l);a(x){11.1m({I:x,2q:G});a(!u)1s()}22=""}}a(1v!==\'\'){1v=\'\'}}}}17=P[P.m-1]}}1Y(f){J.f("15 2p 2W:",f)}2g{1r.3W();b 1x=22.2E(2r,\'\').1l();a(1x!==""){9 x=p 1Z(1x,l);a(x){11.1m({I:x,2q:G});a(!u)1s()}}a(1v!==\'\'){1v=\'\'}a(1w!==\'\'){2Y(\'49\',1w);1w=\'\'}}};9 Q=(4a=F)=>{2C.k=M?"4b: 4c":"4b: 4d";2C.25=M?"1y 1z-1A 1B-4 1C-2 z-1D 1E 1F-1G 1H-1I 1J-S A-B-c-r y-2t-18 c-2t-2u 1K:A-B-c-r y-2t-1d c-2t-1L":"1y 1z-1A 1B-4 1C-2 z-1D 1E 1F-1G 1H-1I 1J-S A-B-c-r y-C-S c-C-18 2v:y-C-1L 2v:c-C-4e 1K:A-B-c-r y-C-18 c-C-1d";a(Z&&!u){R.k="1u: 57...";R.25="1y 1z-1A 1B-4 1C-2 z-1D 1E 1F-1G 
1H-1I 1J-S A-B-c-r y-2w-18 c-2w-2u 1K:A-B-c-r y-2w-1d c-2w-1L"}L a(u&&!M){R.k=4a||"1u: 4c";R.25="1y 1z-1A 1B-4 1C-2 z-1D 1E 1F-1G 1H-1I 1J-S A-B-c-r y-2x-18 c-2x-2u 1K:A-B-c-r y-2x-1d c-2x-1L"}L a(M){R.k="1u: 2y";R.25="1y 1z-1A 1B-4 1C-2 z-1D 1E 1F-1G 1H-1I 1J-S A-B-c-r y-2z-18 c-2z-2u 1K:A-B-c-r y-2z-1d c-2z-1L"}L{R.k="1u: 4d";R.25="1y 1z-1A 1B-4 1C-2 z-1D 1E 1F-1G 1H-1I 1J-S A-B-c-r y-C-S c-C-18 2v:y-C-1L 2v:c-C-4e 1K:A-B-c-r y-C-18 c-C-1d"}};a(\'4f\'2p 58){E=o 4f();59.5a(E,{5b:K,5c:K,5d:\'5e-5f\',5g:3});E.5h=()=>{J.21("2Z 4g 5i");Y=\'\';M=K;28=N.O();Q();1h.30=\'<V 31="32://33.34.35/36/V" 1M="24" 37="24" 38="0 0 24 24" 39="3a" D="3b" D-1M="2" D-3c="1N" D-3d="1N"><q d="4h 4i-4j"></q><q d="3e 3f 3 0 0 0-3 3g 3 0 0 0 6 3h 3 0 0 0-3-3z"></q><q d="3i 3j 7 0 0 1-14 3k-2"></q><j 1O="12" 1P="19" 1Q="12" 1R="23"></j><j 1O="8" 1P="23" 1Q="16" 1R="23"></j></V> 4k 2y\'};E.5j=(T)=>{b 1a=\'\';2o(b i=T.5k;i<T.3l.m;i++){9 1b=T.3l[i][0].1b;a(T.3l[i].5l){Y+=1b;2s(\'5m\');3m(Y);Y=\'\';M=G;Q();27=N.O()}L{1a+=1b;M=K;28=N.O();Q();a(1a.m>29.m+5){4l(29)}29=1a;3A(1a,2A.H);a(Z&&4m(1a)){2s(\'5n\')}}}};E.40=(T)=>{J.f(\'2Z 4g f:\',T.f);a(1j)E.3n()};E.5o=()=>{M=G;Q();a(!Z&&Y!==\'\'){3m(Y);Y=\'\'}a(1j)E.3n()};1h.5p(\'5q\',()=>{a(1j){E.5r();1j=G;1h.30=\'<V 5s="5t" 31="32://33.34.35/36/V" 1M="24" 37="24" 38="0 0 24 24" 39="3a" D="3b" D-1M="2" D-3c="1N" D-3d="1N"><q d="3e 3f 3 0 0 0-3 3g 3 0 0 0 6 3h 3 0 0 0-3-3z"></q><q d="3i 3j 7 0 0 1-14 3k-2"></q><j 1O="12" 1P="19" 1Q="12" 1R="23"></j><j 1O="8" 1P="23" 1Q="16" 1R="23"></j></V> 5u 2y\'}L{E.3n();1j=K;1h.30=\'<V 31="32://33.34.35/36/V" 1M="24" 37="24" 38="0 0 24 24" 39="3a" D="3b" D-1M="2" D-3c="1N" D-3d="1N"><q d="4h 4i-4j"></q><q d="3e 3f 3 0 0 0-3 3g 3 0 0 0 6 3h 3 0 0 0-3-3z"></q><q d="3i 3j 7 0 0 1-14 3k-2"></q><j 1O="12" 1P="19" 1Q="12" 1R="23"></j><j 1O="8" 1P="23" 1Q="16" 1R="23"></j></V> 4k 2y\'}})}L{5v(\'5w 5x 5y 2d 5z 5A 5B 2Z 5C.\')}9 2Y=(3o,1W)=>{a(v.m>0&&v[v.m-1].3o===\'49\'&&v[v.m-1].1W===""){v.5D()}v.1m({3o,1W});a(v.m>6)v.5E(0,2)};9 3m=(1b)=>{9 3p=1b.5F();a(3p!==\'\'&&!Z){26=3p;2V(26);2Y(\'48\',26)}};9 4m=(1a)=>N.O()-28>3s||1a.m>5;9 2s=(3q=\'5G\')=>{J.21(`5H U(3q:${3q})...`);a(u){u.41();u.42=0;u=F}11.m=0;Z=G;a(1c){1c.4n();1c=F}1e.5I();1S.m=0;Q()};9 4l=(h)=>{9 13=1U(h);2o(9[e,1o]5J 1f){a(e.2U(13)){1o.4n();1f.2h(e)}}};9 4o=()=>{a(1k){9 4p=1k-27;2B.k=`4q:${4p}5K`}L{2B.k="4q: 
5L"}};5M(4o,5N);',62,360,'|||||||||const|if|let|to||cacheKey|error||query||line|textContent|voice|length||new|await|path|||response|currentAudio|conversationHistory||audioUrl|from|text|bg|gradient|gray|stroke|speechRecognizer|null|false|value|url|console|true|else|isUserSpeaking|Date|now|lines|updateActivityIndicators|aiActivityIndicator|300|event|audio|svg|document|getElementById|completeTranscript|isRequestInProgress||audioPlaybackQueue||normalizedQuery||Error||buffer|400||interimTranscript|transcript|requestAbortController|500|prefetchCache|pendingPrefetchRequests|return|startStopButton|modelSelectionDropdown|isSpeechRecognitionActive|firstResponseTextTimestamp|trim|push|async|abortController|signal|ok|reader|playNextAudio|cachedData|AI|fullResponseText|fullResponseText2|unsentTextChunk|indicator|rounded|full|px|py|white|flex|items|center|transition|colors|duration|hover|700|width|round|x1|y1|x2|y2|prefetchQueue|audioCache|normalizeQueryText|try|content|throw|catch|generateTextToSpeechAudio|resolve|log|textChunk|||className|activeQuery|queryStartTime|lastUserSpeechTimestamp|prefetchTextQuery|history|has|stream|not|timestamp|AbortError|finally|delete|responseStream|abortSignal|decoder|done|aborted|chunk|for|in|isPrefetched|sentText|interruptAudioPlayback|blue|600|dark|purple|green|Listening|yellow|voiceSelectionDropdown|responseTimeDisplay|userActivityIndicator|Map|replace|generateCacheKey|JSON|stringify|processPrefetchQueue|while|set|fetch|encodeURIComponent|method|GET|Network|was|firstAudioUrl|handleStreamingResponseForPrefetch|break|startsWith|sendQueryToAI|handleStreamingResponse|initialChunksSent|addToConversationHistory|Speech|innerHTML|xmlns|http|www|w3|org|2000|height|viewBox|fill|none|currentColor|linecap|linejoin|M12|1a3|3v8a3|0V4a3|M19|10v2a7|0v|results|processSpeechTranscript|start|role|trimmedTranscript|reason|transcriptDiv|USER_SPEECH_INTERRUPT_DELAY|TEXT_TO_SPEECH_API_ENDPOINT|CHUNK_SIZE|MAX_PREFETCH_REQUESTS|prefetchCacheExpiration|audioCacheExpiration|modelName||prefetchFirstAudioChunk|shift|AbortController|stream_audio|model|headers|accept|type|application|json|body|name|getReader|TextDecoder|utf|read|DOMException|Request|decode|split|data|substring|releaseLock|audioData|audioPromise|Promise|onerror|pause|currentTime|normalizedText|get|audioBlob|prefetchedAudioUrl|status|user|assistant|state|User|Speaking|Idle|800|webkitSpeechRecognition|recognition|M9|9h6v6h|6z|Stop|cancelPrefetchRequests|shouldInterruptAudioPlayback|abort|updateLatency|latency|Latency|voiceSelect|modelSelect|noiseSuppressionCheckbox|noiseSuppression|responseTime|userIndicator|aiIndicator|partialTranscript|https|api|streamelements|com|kappa|v2|speech|60000|3600000|toLowerCase|size|prefetching|Audio|onended|play|blob|URL|createObjectURL|generating|TTS|Sending|429|Rate|limit|hit|retrying|second|setTimeout|1000|Streaming|received|sending|is|speaking|Processing|window|Object|assign|continuous|interimResults|language|en|US|maxAlternatives|onstart|started|onresult|resultIndex|isFinal|final|interim|onend|addEventListener|click|stop|id|microphoneIcon|Start|alert|Your|browser|does|support|the|Web|API|pop|splice|trimStart|unknown|Interrupting|clear|of|ms|0ms|setInterval|200'.split('|'),0,{}))
+ const startStopButton = document.getElementById('startStopButton');
+ const voiceSelectionDropdown = document.getElementById('voiceSelect');
+ const modelSelectionDropdown = document.getElementById('modelSelect');
+ const noiseSuppressionCheckbox = document.getElementById('noiseSuppression');
+ const responseTimeDisplay = document.getElementById('responseTime');
+ const userActivityIndicator = document.getElementById('userIndicator');
+ const aiActivityIndicator = document.getElementById('aiIndicator');
+ const transcriptDiv = document.getElementById('transcript');
+
+ let speechRecognizer;
+ let activeQuery = null;
+ let queryStartTime = 0;
+ let completeTranscript = '';
+ let isRequestInProgress = false;
+ let isUserSpeaking = false;
+ let isSpeechRecognitionActive = false;
+ let requestAbortController = null;
+ let partialTranscript = '';
+ let lastUserSpeechTimestamp = null;
+ let prefetchTextQuery = "";
+ let firstResponseTextTimestamp = null;
+
+ // Configuration
+ const USER_SPEECH_INTERRUPT_DELAY = 500;
+ const TEXT_TO_SPEECH_API_ENDPOINT = "https://api.streamelements.com/kappa/v2/speech";
+ const CHUNK_SIZE = 300;
+
+ // Audio Management
+ let currentAudio = null;
+ let audioPlaybackQueue = [];
+ let prefetchQueue = [];
+
+ // Enhanced Prefetching and Caching
+ const prefetchCache = new Map();
+ const pendingPrefetchRequests = new Map();
+ const MAX_PREFETCH_REQUESTS = 10;
+ const prefetchCacheExpiration = 60000; // 1 minute
+
+ // Global Conversation History
+ let conversationHistory = [];
+
+ // Audio Caching
+ const audioCache = new Map();
+ const audioCacheExpiration = 3600000; // 1 hour
+
+ // Normalize query text
+ const normalizeQueryText = query => query.trim().toLowerCase().replace(/[^\w\s]/g, '');
+
+ // Generate a cache key
+ const generateCacheKey = (normalizedQuery, voice, history, modelName) =>
+   `${normalizedQuery}-${voice}-${JSON.stringify(history)}-${modelName}`;
+
+ // Prefetch and cache the first TTS audio chunk
+ const prefetchFirstAudioChunk = (query, voice) => {
+   const normalizedQuery = normalizeQueryText(query);
+   const cacheKey = generateCacheKey(normalizedQuery, voice, conversationHistory, modelSelectionDropdown.value);
+
+   if (pendingPrefetchRequests.has(cacheKey) || prefetchCache.has(cacheKey)) return;
+
+   prefetchQueue.push({ query: query.trim(), voice, cacheKey });
+   processPrefetchQueue();
+ };
+
+ // Process the prefetch queue
+ const processPrefetchQueue = async () => {
+   while (prefetchQueue.length > 0 && pendingPrefetchRequests.size < MAX_PREFETCH_REQUESTS) {
+     const { query, voice, cacheKey } = prefetchQueue.shift();
+     const abortController = new AbortController();
+     pendingPrefetchRequests.set(cacheKey, abortController);
+
+     const url = '/stream_text';
+     const requestBody = {
+       query: query,
+       history: JSON.stringify(conversationHistory),
+       model: modelSelectionDropdown.value
+     };
+
+     try {
+       const response = await fetch(url, {
+         method: 'POST',
+         headers: {
+           'Accept': 'text/event-stream',
+           'Content-Type': 'application/json'
+         },
+         body: JSON.stringify(requestBody),
+         signal: abortController.signal
+       });
+
+       if (!response.ok) throw new Error('Network response was not ok');
+
+       const firstAudioUrl = await handleStreamingResponseForPrefetch(response.body, voice, abortController.signal);
+
+       if (firstAudioUrl) prefetchCache.set(cacheKey, { url: firstAudioUrl, timestamp: Date.now() });
+
+     } catch (error) {
+       if (error.name !== 'AbortError') console.error("Error prefetching audio:", error);
+     } finally {
+       pendingPrefetchRequests.delete(cacheKey);
+       processPrefetchQueue();
+     }
+   }
+ };
+
+ // Handle the streaming response for prefetching
+ const handleStreamingResponseForPrefetch = async (responseStream, voice, abortSignal) => {
+   const reader = responseStream.getReader();
+   const decoder = new TextDecoder("utf-8");
+   let buffer = "";
+
+   try {
+     while (true) {
+       const { done, value } = await reader.read();
+       if (done) break;
+       if (abortSignal.aborted) throw new DOMException('Request aborted', 'AbortError');
+
+       const chunk = decoder.decode(value, { stream: true });
+       buffer += chunk;
+       const lines = buffer.split('\n');
+
+       for (let i = 0; i < lines.length - 1; i++) {
+         const line = lines[i];
+         if (line.startsWith('data: ')) {
+           const textContent = line.substring(6).trim();
+           if (textContent) {
+             const audioUrl = await generateTextToSpeechAudio(textContent, voice);
+             return audioUrl;
+           }
+         }
+       }
+
+       buffer = lines[lines.length - 1];
+     }
+   } catch (error) {
+     console.error("Error in handleStreamingResponseForPrefetch:", error);
+   } finally {
+     reader.releaseLock();
+   }
+
+   return null;
+ };
+
+ // Play audio from the queue
+ const playNextAudio = async () => {
+   if (audioPlaybackQueue.length > 0) {
+     const audioData = audioPlaybackQueue.shift();
+     const audio = new Audio(audioData.url);
+     updateActivityIndicators();
+
+     const audioPromise = new Promise(resolve => {
+       audio.onended = resolve;
+       audio.onerror = resolve;
+     });
+     if (currentAudio) {
+       currentAudio.pause();
+       currentAudio.currentTime = 0;
+     }
+
+     currentAudio = audio;
+     await audio.play();
+     await audioPromise;
+     playNextAudio();
+   } else {
+     updateActivityIndicators();
+   }
+ };
+
+ // Generate Text-to-Speech audio with caching
+ const generateTextToSpeechAudio = async (text, voice) => {
+   const normalizedText = normalizeQueryText(text);
+   const cacheKey = `${normalizedText}-${voice}`;
+
+   if (audioCache.has(cacheKey)) {
+     const cachedData = audioCache.get(cacheKey);
+     if (Date.now() - cachedData.timestamp < audioCacheExpiration) {
+       return cachedData.url;
+     } else {
+       audioCache.delete(cacheKey);
+     }
+   }
+
+   try {
+     const response = await fetch(`${TEXT_TO_SPEECH_API_ENDPOINT}?voice=${voice}&text=${encodeURIComponent(text)}`, { method: 'GET' });
+     if (!response.ok) throw new Error('Network response was not ok');
+     const audioBlob = await response.blob();
+     const audioUrl = URL.createObjectURL(audioBlob);
+
+     audioCache.set(cacheKey, { url: audioUrl, timestamp: Date.now() });
+     return audioUrl;
+   } catch (error) {
+     console.error("Error generating TTS audio:", error);
+     return null;
+   }
+ };
+
+ // Send a query to the AI
+ const sendQueryToAI = async (query) => {
+   console.log("Sending query to AI:", query);
+   isRequestInProgress = true;
+   updateActivityIndicators();
+   queryStartTime = Date.now();
+   firstResponseTextTimestamp = null;
+
+   const normalizedQuery = normalizeQueryText(query);
+   const cacheKey = generateCacheKey(normalizedQuery, voiceSelectionDropdown.value, conversationHistory, modelSelectionDropdown.value);
+
+   if (prefetchCache.has(cacheKey)) {
+     const cachedData = prefetchCache.get(cacheKey);
+     if (Date.now() - cachedData.timestamp < prefetchCacheExpiration) {
+       const prefetchedAudioUrl = cachedData.url;
+       audioPlaybackQueue.push({ url: prefetchedAudioUrl, isPrefetched: true });
+       playNextAudio();
+     } else {
+       prefetchCache.delete(cacheKey);
+     }
+   }
+
+   requestAbortController = new AbortController();
+
+   const url = '/stream_text';
+   const requestBody = {
+     query: query,
+     history: JSON.stringify(conversationHistory),
+     model: modelSelectionDropdown.value
+   };
+
+   try {
+     const response = await fetch(url, {
+       method: 'POST',
+       headers: {
+         'Accept': 'text/event-stream',
+         'Content-Type': 'application/json'
+       },
+       body: JSON.stringify(requestBody),
+       signal: requestAbortController.signal
+     });
+
+     if (!response.ok) {
+       if (response.status === 429) {
+         console.log("Rate limit hit, retrying in 1 second...");
+         await new Promise(resolve => setTimeout(resolve, 1000));
+         await sendQueryToAI(query);
+         return;
+       }
+       throw new Error(`Network response was not ok: ${response.status}`);
+     }
+
+     console.log("Streaming audio response received");
+     await handleStreamingResponse(response.body, voiceSelectionDropdown.value, requestAbortController.signal);
+   } catch (error) {
+     if (error.name !== 'AbortError') {
+       console.error("Error sending query to AI:", error);
+     }
+   } finally {
+     isRequestInProgress = false;
+     updateActivityIndicators();
+   }
+ };
+
+ // Handle the streaming audio response
+ const handleStreamingResponse = async (responseStream, voice, abortSignal) => {
+   const reader = responseStream.getReader();
+   const decoder = new TextDecoder("utf-8");
+   let buffer = "";
+   let initialChunksSent = 0;
+   let fullResponseText = "";
+   let fullResponseText2 = "";
+   let textChunk = "";
+   let sentText = "";
+
+   try {
+     while (true) {
+       const { done, value } = await reader.read();
+       if (done) break;
+       if (abortSignal.aborted) throw new DOMException('Request aborted', 'AbortError');
+
+       if (isUserSpeaking) {
+         interruptAudioPlayback('user is speaking');
+         break;
+       }
+
+       const chunk = decoder.decode(value, { stream: true });
+       buffer += chunk;
+       const lines = buffer.split('\n');
+
+       for (let i = 0; i < lines.length - 1; i++) {
+         const line = lines[i];
+         if (line.startsWith('data: ')) {
+           const textContent = line.substring(6).trim();
+           if (textContent) {
+             if (!firstResponseTextTimestamp) firstResponseTextTimestamp = Date.now();
+
+             fullResponseText += textContent + " ";
+             fullResponseText2 += textContent + " ";
+             textChunk += textContent + " ";
+             transcriptDiv.textContent = fullResponseText2;
+
+             if (initialChunksSent < 2) {
+               const audioUrl = await generateTextToSpeechAudio(textContent, voice);
+               if (audioUrl) {
+                 audioPlaybackQueue.push({ url: audioUrl, isPrefetched: false });
+                 if (!currentAudio) playNextAudio();
+               }
+               sentText += textContent + " ";
+               initialChunksSent++;
+             } else {
+               let unsentTextChunk = textChunk.replace(sentText, '').trim();
+
+               if (unsentTextChunk.length >= CHUNK_SIZE) {
+                 const audioUrl = await generateTextToSpeechAudio(unsentTextChunk, voice);
+                 if (audioUrl) {
+                   audioPlaybackQueue.push({ url: audioUrl, isPrefetched: false });
+                   if (!currentAudio) playNextAudio();
+                 }
+                 textChunk = "";
+               }
+             }
+
+             if (fullResponseText !== '') {
+               fullResponseText = '';
+             }
+           }
+         }
+       }
+
+       buffer = lines[lines.length - 1];
+     }
+   } catch (error) {
+     console.error("Error in handleStreamingResponse:", error);
+   } finally {
+     reader.releaseLock();
+
+     let unsentTextChunk = textChunk.replace(sentText, '').trim();
+     if (unsentTextChunk !== "") {
+       const audioUrl = await generateTextToSpeechAudio(unsentTextChunk, voice);
+       if (audioUrl) {
+         audioPlaybackQueue.push({ url: audioUrl, isPrefetched: false });
+         if (!currentAudio) playNextAudio();
+       }
+     }
+
+     if (fullResponseText !== '') {
+       fullResponseText = '';
+     }
+     if (fullResponseText2 !== '') {
+       addToConversationHistory('assistant', fullResponseText2);
+       fullResponseText2 = '';
+     }
+   }
+ };
+
+ // Update activity indicators
+ const updateActivityIndicators = (state = null) => {
+   userActivityIndicator.textContent = isUserSpeaking ? "User: Speaking" : "User: Idle";
+   userActivityIndicator.className = isUserSpeaking
+     ? "indicator rounded-full px-4 py-2 text-white flex items-center transition-colors duration-300 bg-gradient-to-r from-blue-400 to-blue-600 hover:bg-gradient-to-r from-blue-500 to-blue-700"
+     : "indicator rounded-full px-4 py-2 text-white flex items-center transition-colors duration-300 bg-gradient-to-r from-gray-300 to-gray-400 dark:from-gray-700 dark:to-gray-800 hover:bg-gradient-to-r from-gray-400 to-gray-500"; // Tailwind classes
+
+   if (isRequestInProgress && !currentAudio) {
+     aiActivityIndicator.textContent = "AI: Processing...";
+     aiActivityIndicator.className = "indicator rounded-full px-4 py-2 text-white flex items-center transition-colors duration-300 bg-gradient-to-r from-purple-400 to-purple-600 hover:bg-gradient-to-r from-purple-500 to-purple-700"; // Tailwind class for thinking
+   } else if (currentAudio && !isUserSpeaking) {
+     aiActivityIndicator.textContent = state || "AI: Speaking";
+     aiActivityIndicator.className = "indicator rounded-full px-4 py-2 text-white flex items-center transition-colors duration-300 bg-gradient-to-r from-green-400 to-green-600 hover:bg-gradient-to-r from-green-500 to-green-700"; // Tailwind class for speaking
+   } else if (isUserSpeaking) {
+     aiActivityIndicator.textContent = "AI: Listening";
+     aiActivityIndicator.className = "indicator rounded-full px-4 py-2 text-white flex items-center transition-colors duration-300 bg-gradient-to-r from-yellow-400 to-yellow-600 hover:bg-gradient-to-r from-yellow-500 to-yellow-700"; // Tailwind class for listening
+   } else {
+     aiActivityIndicator.textContent = "AI: Idle";
+     aiActivityIndicator.className = "indicator rounded-full px-4 py-2 text-white flex items-center transition-colors duration-300 bg-gradient-to-r from-gray-300 to-gray-400 dark:from-gray-700 dark:to-gray-800 hover:bg-gradient-to-r from-gray-400 to-gray-500"; // Tailwind classes
+   }
+ };
+
+
+ // Initialize speech recognition
+ if ('webkitSpeechRecognition' in window) {
+   speechRecognizer = new webkitSpeechRecognition();
+   Object.assign(speechRecognizer, {
+     continuous: true,
+     interimResults: true,
+     lang: 'en-US',
+     maxAlternatives: 3
+   });
+
+   speechRecognizer.onstart = () => {
+     console.log("Speech recognition started");
+     completeTranscript = '';
+     isUserSpeaking = true;
+     lastUserSpeechTimestamp = Date.now();
+     updateActivityIndicators();
+     startStopButton.innerHTML = '<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"><path d="M9 9h6v6h-6z"></path><path d="M12 1a3 3 0 0 0-3 3v8a3 3 0 0 0 6 0V4a3 3 0 0 0-3-3z"></path><path d="M19 10v2a7 7 0 0 1-14 0v-2"></path><line x1="12" y1="19" x2="12" y2="23"></line><line x1="8" y1="23" x2="16" y2="23"></line></svg> Stop Listening';
+   };
+
+   speechRecognizer.onresult = (event) => {
+     let interimTranscript = '';
+     for (let i = event.resultIndex; i < event.results.length; i++) {
+       const transcript = event.results[i][0].transcript;
+       if (event.results[i].isFinal) {
+         completeTranscript += transcript;
+         interruptAudioPlayback('final');
+         processSpeechTranscript(completeTranscript);
+         completeTranscript = '';
+         isUserSpeaking = false;
+         updateActivityIndicators();
+         queryStartTime = Date.now();
+       } else {
+         interimTranscript += transcript;
+         isUserSpeaking = true;
+         lastUserSpeechTimestamp = Date.now();
+         updateActivityIndicators();
+
+         if (interimTranscript.length > prefetchTextQuery.length + 5) {
+           cancelPrefetchRequests(prefetchTextQuery);
+         }
+         prefetchTextQuery = interimTranscript;
+         prefetchFirstAudioChunk(interimTranscript, voiceSelectionDropdown.value);
+
+         if (isRequestInProgress && shouldInterruptAudioPlayback(interimTranscript)) {
+           interruptAudioPlayback('interim');
+         }
+       }
+     }
+   };
+
+   speechRecognizer.onerror = (event) => {
+     console.error('Speech recognition error:', event.error);
+     if (isSpeechRecognitionActive) speechRecognizer.start();
+   };
+
+   speechRecognizer.onend = () => {
+     isUserSpeaking = false;
+     updateActivityIndicators();
+
+     if (!isRequestInProgress && completeTranscript !== '') {
+       processSpeechTranscript(completeTranscript);
+       completeTranscript = '';
+     }
+
+     if (isSpeechRecognitionActive) speechRecognizer.start();
+   };
+
+   startStopButton.addEventListener('click', () => {
+     if (isSpeechRecognitionActive) {
+       speechRecognizer.stop();
+       isSpeechRecognitionActive = false;
+       startStopButton.innerHTML = '<svg id="microphoneIcon" xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"><path d="M12 1a3 3 0 0 0-3 3v8a3 3 0 0 0 6 0V4a3 3 0 0 0-3-3z"></path><path d="M19 10v2a7 7 0 0 1-14 0v-2"></path><line x1="12" y1="19" x2="12" y2="23"></line><line x1="8" y1="23" x2="16" y2="23"></line></svg> Start Listening';
+     } else {
+       speechRecognizer.start();
+       isSpeechRecognitionActive = true;
+       startStopButton.innerHTML = '<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"><path d="M9 9h6v6h-6z"></path><path d="M12 1a3 3 0 0 0-3 3v8a3 3 0 0 0 6 0V4a3 3 0 0 0-3-3z"></path><path d="M19 10v2a7 7 0 0 1-14 0v-2"></path><line x1="12" y1="19" x2="12" y2="23"></line><line x1="8" y1="23" x2="16" y2="23"></line></svg> Stop Listening';
+     }
+   });
+ } else {
+   alert('Your browser does not support the Web Speech API.');
+ }
+
+ // Add to conversation history
+ const addToConversationHistory = (role, content) => {
+   if (conversationHistory.length > 0 &&
+       conversationHistory[conversationHistory.length - 1].role === 'assistant' &&
+       conversationHistory[conversationHistory.length - 1].content === "") {
+     conversationHistory.pop();
+   }
+
+   conversationHistory.push({ role, content });
+
+   if (conversationHistory.length > 6) conversationHistory.splice(0, 2);
+ };
+
+ // Process the final speech transcript
+ const processSpeechTranscript = (transcript) => {
+   const trimmedTranscript = transcript.trimStart();
+   if (trimmedTranscript !== '' && !isRequestInProgress) {
+     activeQuery = trimmedTranscript;
+     sendQueryToAI(activeQuery);
+     addToConversationHistory('user', activeQuery);
+   }
+ };
+
+ // Check if audio playback should be interrupted
+ const shouldInterruptAudioPlayback = (interimTranscript) =>
+   Date.now() - lastUserSpeechTimestamp > USER_SPEECH_INTERRUPT_DELAY || interimTranscript.length > 5;
+
+ // Interrupt audio playback
+ const interruptAudioPlayback = (reason = 'unknown') => {
+   console.log(`Interrupting audio (reason: ${reason})...`);
+   if (currentAudio) {
+     currentAudio.pause();
+     currentAudio.currentTime = 0;
+     currentAudio = null;
+   }
+
+   audioPlaybackQueue.length = 0;
+   isRequestInProgress = false;
+
+   if (requestAbortController) {
+     requestAbortController.abort();
+     requestAbortController = null;
+   }
+
+   prefetchCache.clear();
+   prefetchQueue.length = 0;
+   updateActivityIndicators();
+ };
+
+ // Cancel pending prefetch requests
+ const cancelPrefetchRequests = (query) => {
+   const normalizedQuery = normalizeQueryText(query);
+
+   for (const [cacheKey, abortController] of pendingPrefetchRequests) {
+     if (cacheKey.startsWith(normalizedQuery)) {
+       abortController.abort();
+       pendingPrefetchRequests.delete(cacheKey);
+     }
+   }
+ };
+
+ // Update latency display
+ const updateLatency = () => {
+   if (firstResponseTextTimestamp) {
+     const latency = firstResponseTextTimestamp - queryStartTime;
+     responseTimeDisplay.textContent = `Latency: ${latency}ms`;
+   } else {
+     responseTimeDisplay.textContent = "Latency: 0ms";
+   }
+ };
+
+ setInterval(updateLatency, 200);
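
script1.js synthesizes speech by calling the StreamElements TTS endpoint with voice and text query parameters (see generateTextToSpeechAudio above). For reference, a minimal Python sketch of the same request; the use of the requests library and the output filename are assumptions, not part of this PR.

import requests

# Mirrors generateTextToSpeechAudio() in script1.js: GET the TTS endpoint
# with `voice` and `text` query parameters and save the returned audio.
TTS_ENDPOINT = "https://api.streamelements.com/kappa/v2/speech"

resp = requests.get(TTS_ENDPOINT, params={"voice": "Brian", "text": "Hello from Voicee!"}, timeout=30)
resp.raise_for_status()
with open("speech.mp3", "wb") as f:  # filename is an assumption
    f.write(resp.content)
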
script2.js CHANGED
@@ -1 +1,582 @@
- eval(function(p,a,c,k,e,r){e=function(c){return(c<a?'':e(parseInt(c/a)))+((c=c%a)>35?String.fromCharCode(c+29):c.toString(36))};if(!''.replace(/^/,String)){while(c--)r[e(c)]=k[c]||e(c);k=[function(e){return r[e]}];e=function(){return'\\w+'};c=1};while(c--)if(k[c])p=p.replace(new RegExp('\\b'+e(c)+'\\b','g'),k[c]);return p}('9 X=1a.1b(\'X\');9 2U=1a.1b(\'4r\');9 1p=1a.1b(\'4s\');9 4t=1a.1b(\'4u\');9 2V=1a.1b(\'4v\');9 2W=1a.1b(\'4w\');9 Y=1a.1b(\'4x\');9 2X=1a.1b(\'1j\');e y;e 2s=Q;e 2t=0;e 1c=\'\';e Z=D;e U=D;e E=D;e 1k=Q;e 4y=\'\';e 2Y=Q;e 2u="";e 1q=Q;9 4z=1l;9 3v="4A://4B.4C.4D/4E/4F/4G";9 3w=11;e v=Q;e 1d=[];e 2d=[];9 1m=q 2Z();9 1n=q 2Z();9 3x=10;9 3y=4H;e z=[];9 2e=q 2Z();9 3A=4I;9 2f=l=>l.1r().4J().30(/[^\\w\\s]/g,\'\');9 31=(1e,p,2v,3B)=>`${1e}-${p}-${32.33(2v)}-${3B}`;9 3C=(l,p)=>{9 1e=2f(l);9 j=31(1e,p,z,1p.R);a(1n.2w(j)||1m.2w(j))1o;2d.1s({l:l.1r(),p,j});34()};9 34=1t()=>{35(2d.t>0&&1n.4K<3x){9{l,p,j}=2d.3D();9 1u=q 3E();1n.36(j,1u);2g{9 x=u 37(`/3F?l=${38(l)}&2v=${32.33(z)}&3G=${1p.R}`,{39:\'3a\',3H:{\'3I\':\'F/13-2x\',\'2h-3J\':\'3K/3L\'},1v:1u.1v});a(!x.1w)2i q 1f(\'3b x 3c 2y 1w\');9 3d=u 3e(x.3M,p,1u.1v);a(3d)1m.36(j,{S:3d,2z:15.17()})}2j(k){a(k.3N!==\'2A\')T.k("1f 4L 18:",k)}2B{1n.2C(j);34()}}};9 3e=1t(2D,p,2E)=>{9 1x=2D.3O();9 2F=q 3P("3Q-8");e 1g="";2g{35(C){9{2G,R}=u 1x.3R();a(2G)3f;a(2E.2H)2i q 3S(\'3T 2H\',\'2A\');9 2I=2F.3U(R,{2x:C});1g+=2I;9 V=1g.3V(\'\\n\');2J(e i=0;i<V.t-1;i++){9 b=V[i];a(b.3g(\'3W: \')){9 m=b.3X(6).1r();a(m){9 A=u 2k(m,p);1o A}}}1g=V[V.t-1]}}2j(k){T.k("1f 2K 3e:",k)}2B{1x.3Y()}1o Q};9 1y=1t()=>{a(1d.t>0){9 3Z=1d.3D();9 18=q 4M(3Z.S);W();a(E){y.40();E=D;X.1z=`<o 1A="1B://1C.1D.1E/1F/o"G="24"1G="24"1H="0 0 24 24"1I="1J"h="1K"h-G="2"h-1L="H"h-1M="H"><c d="1N 1O 3 0 0 0-3 1P 3 0 0 0 6 1Q 3 0 0 0-3-3z"></c><c d="1R 1S 7 0 0 1-14 1T-2"></c><b I="12"J="19"K="12"L="23"></b><b I="8"J="23"K="16"L="23"></b></o>41 1h`}9 42=q 43(2l=>{18.4N=2l;18.44=2l});a(v){v.45();v.46=0}v=18;u 18.4O();u 42;1y()}M{W();47(()=>{a(!E){y.2m();E=C;X.1z=`<o 1A="1B://1C.1D.1E/1F/o"G="24"1G="24"1H="0 0 24 24"1I="1J"h="1K"h-G="2"h-1L="H"h-1M="H"><c d="3h 3i-3j"></c><c d="1N 1O 3 0 0 0-3 1P 3 0 0 0 6 1Q 3 0 0 0-3-3z"></c><c d="1R 1S 7 0 0 1-14 1T-2"></c><b I="12"J="19"K="12"L="23"></b><b I="8"J="23"K="16"L="23"></b></o>3k 2n`}},4P)}};9 2k=1t(F,p)=>{9 48=2f(F);9 j=`${48}-${p}`;a(2e.2w(j)){9 1U=2e.49(j);a(15.17()-1U.2z<3A){1o 1U.S}M{2e.2C(j)}}2g{9 x=u 37(`${3v}?p=${p}&F=${38(F)}`,{39:\'3a\'});a(!x.1w)2i q 1f(\'3b x 3c 2y 1w\');9 4a=u x.4Q();9 A=4R.4S(4a);2e.36(j,{S:A,2z:15.17()});1o A}2j(k){T.k("1f 4T 4U 18:",k);1o Q}};9 3l=1t(l)=>{T.2o("4V l f 1h:",l);Z=C;W();2t=15.17();1q=Q;9 1e=2f(l);9 j=31(1e,1p.R,z,1p.R);a(1m.2w(j)){9 1U=1m.49(j);a(15.17()-1U.2z<3y){9 4b=1U.S;1d.1s({S:4b,2L:C});1y()}M{1m.2C(j)}}1k=q 3E();9 S=`/3F?l=${38(l)}&3G=${1p.R}&2v=${32.33(z)}`;2g{9 x=u 37(S,{39:\'3a\',3H:{\'3I\':\'F/13-2x\',\'2h-3J\':\'3K/3L\'},1v:1k.1v});a(!x.1w){a(x.4c===4W){T.2o("4X 4Y 4Z, 50 2K 1 51...");u q 43(2l=>47(2l,52));u 3l(l);1o}2i q 1f(`3b x 3c 2y 1w:${x.4c}`)}T.2o("53 18 x 54");u 3m(x.3M,2U.R,1k.1v)}2j(k){a(k.3N!==\'2A\'){T.k("1f 55 l f 1h:",k)}}2B{Z=D;W()}};9 3m=1t(2D,p,2E)=>{9 1x=2D.3O();9 2F=q 3P("3Q-8");e 1g="";e 3n=0;e 1V="";e 2p="";e 2M="";2g{35(C){9{2G,R}=u 1x.3R();a(2G)3f;a(2E.2H)2i q 3S(\'3T 2H\',\'2A\');a(U){2N(\'4d 56 57\');3f}9 2I=2F.3U(R,{2x:C});1g+=2I;9 V=1g.3V(\'\\n\');2J(e i=0;i<V.t-1;i++){9 b=V[i];a(b.3g(\'3W: \')){9 m=b.3X(6).1r();a(m){a(!1q)1q=15.17();1V+=m+" ";2p+=m+" ";2X.m=1V;a(3n<2){9 A=u 2k(m,p);a(A){1d.1s({S:A,2L:D});a(!v)1y()}2M+=m+" ";3n++}M{e 
1W=2p.30(2M,\'\').1r();a(1W.t>=3w){9 A=u 2k(1W,p);a(A){1d.1s({S:A,2L:D});a(!v)1y()}2p=""}}}}}1g=V[V.t-1]}}2j(k){T.k("1f 2K 3m:",k)}2B{1x.3Y();e 1W=2p.30(2M,\'\').1r();a(1W!==""){9 A=u 2k(1W,p);a(A){1d.1s({S:A,2L:D});a(!v)1y()}}a(1V!==\'\'){3o(\'4e\',1V);1V=\'\'}}};9 W=(4f=Q)=>{2W.m=U?"4g: 4h":"4g: 4i";2W.2q=U?"1X 1Y-1Z 20-4 21-2 F-22 25 26-27 28-29 2a-11 N-O-f-r B-2O-1i f-2O-2P 2b:N-O-f-r B-2O-1l f-2O-2c":"1X 1Y-1Z 20-4 21-2 F-22 25 26-27 28-29 2a-11 N-O-f-r B-P-11 f-P-1i 2Q:B-P-2c 2Q:f-P-4j 2b:N-O-f-r B-P-1i f-P-1l";a(Z&&!v){Y.m="1h: 58...";Y.2q="1X 1Y-1Z 20-4 21-2 F-22 25 26-27 28-29 2a-11 N-O-f-r B-2R-1i f-2R-2P 2b:N-O-f-r B-2R-1l f-2R-2c"}M a(v&&!U){Y.m=4f||"1h: 4h";Y.2q="1X 1Y-1Z 20-4 21-2 F-22 25 26-27 28-29 2a-11 N-O-f-r B-2S-1i f-2S-2P 2b:N-O-f-r B-2S-1l f-2S-2c"}M a(U){Y.m="1h: 2n";Y.2q="1X 1Y-1Z 20-4 21-2 F-22 25 26-27 28-29 2a-11 N-O-f-r B-2T-1i f-2T-2P 2b:N-O-f-r B-2T-1l f-2T-2c"}M{Y.m="1h: 4i";Y.2q="1X 1Y-1Z 20-4 21-2 F-22 25 26-27 28-29 2a-11 N-O-f-r B-P-11 f-P-1i 2Q:B-P-2c 2Q:f-P-4j 2b:N-O-f-r B-P-1i f-P-1l"}};a(\'4k\'2K 59){y=q 4k();5a.5b(y,{5c:C,5d:C,5e:\'5f-5g\',5h:3});y.5i=()=>{T.2o("3p 4l 5j");1c=\'\';U=C;2Y=15.17();W();X.1z=`<o 1A="1B://1C.1D.1E/1F/o"G="24"1G="24"1H="0 0 24 24"1I="1J"h="1K"h-G="2"h-1L="H"h-1M="H"><c d="3h 3i-3j"></c><c d="1N 1O 3 0 0 0-3 1P 3 0 0 0 6 1Q 3 0 0 0-3-3z"></c><c d="1R 1S 7 0 0 1-14 1T-2"></c><b I="12"J="19"K="12"L="23"></b><b I="8"J="23"K="16"L="23"></b></o>3k 2n`};y.5k=(13)=>{e 2r=\'\';2J(e i=13.5l;i<13.3q.t;i++){9 1j=13.3q[i][0].1j;a(13.3q[i].5m){1c+=1j;2N(\'5n\');3r(1c);1c=\'\';U=D;W();2t=15.17()}M{2r+=1j;U=C;2Y=15.17();W();a(2r.t>2u.t+5){4m(2u)}2u=2r;3C(2r,2U.R)}}};y.44=(13)=>{T.k(\'3p 4l k:\',13.k);a(E)y.2m()};y.5o=()=>{U=D;W();a(!Z&&1c!==\'\'){3r(1c);1c=\'\'}a(E)y.2m()};X.5p(\'5q\',()=>{a(E&&!Z){y.40();E=D;X.1z=`<o 1A="1B://1C.1D.1E/1F/o"G="24"1G="24"1H="0 0 24 24"1I="1J"h="1K"h-G="2"h-1L="H"h-1M="H"><c d="1N 1O 3 0 0 0-3 1P 3 0 0 0 6 1Q 3 0 0 0-3-3z"></c><c d="1R 1S 7 0 0 1-14 1T-2"></c><b I="12"J="19"K="12"L="23"></b><b I="8"J="23"K="16"L="23"></b></o>5r 2n`}M a(E&&Z||v){2N(\'5s 5t\');y.2m();E=C;X.1z=`<o 1A="1B://1C.1D.1E/1F/o"G="24"1G="24"1H="0 0 24 24"1I="1J"h="1K"h-G="2"h-1L="H"h-1M="H"><c d="1N 1O 3 0 0 0-3 1P 3 0 0 0 6 1Q 3 0 0 0-3-3z"></c><c d="1R 1S 7 0 0 1-14 1T-2"></c><b I="12"J="19"K="12"L="23"></b><b I="8"J="23"K="16"L="23"></b></o>41 1h`}M{y.2m();E=C;X.1z=`<o 1A="1B://1C.1D.1E/1F/o"G="24"1G="24"1H="0 0 24 24"1I="1J"h="1K"h-G="2"h-1L="H"h-1M="H"><c d="3h 3i-3j"></c><c d="1N 1O 3 0 0 0-3 1P 3 0 0 0 6 1Q 3 0 0 0-3-3z"></c><c d="1R 1S 7 0 0 1-14 1T-2"></c><b I="12"J="19"K="12"L="23"></b><b I="8"J="23"K="16"L="23"></b></o>3k 2n`}})}M{5u(\'5v 5w 5x 2y 5y 5z 5A 3p 5B.\')}9 3o=(3s,2h)=>{a(z.t>0&&z[z.t-1].3s===\'4e\'&&z[z.t-1].2h===""){z.5C()}z.1s({3s,2h});a(z.t>6)z.5D(0,2)};9 3r=(1j)=>{9 3t=1j.5E();a(3t!==\'\'&&!Z){2s=3t;3l(2s);3o(\'4d\',2s);2X.m=\'\'}};9 2N=(3u=\'5F\')=>{T.2o(`5G 18(3u:${3u})...`);a(v){v.45();v.46=0;v=Q}1d.t=0;Z=D;a(1k){1k.4n();1k=Q}1m.5H();2d.t=0;W()};9 4m=(l)=>{9 1e=2f(l);2J(9[j,1u]5I 1n){a(j.3g(1e)){1u.4n();1n.2C(j)}}};9 4o=()=>{a(1q){9 4p=1q-2t;2V.m=`4q:${4p}5J`}M{2V.m="4q: 
5K"}};5L(4o,5M);',62,359,'|||||||||const|if|line|path||let|to||stroke||cacheKey|error|query|textContent||svg|voice|new|||length|await|currentAudio||response|speechRecognizer|conversationHistory|audioUrl|from|true|false|isSpeechRecognitionActive|text|width|round|x1|y1|x2|y2|else|bg|gradient|gray|null|value|url|console|isUserSpeaking|lines|updateActivityIndicators|startStopButton|aiActivityIndicator|isRequestInProgress||300||event||Date||now|audio||document|getElementById|completeTranscript|audioPlaybackQueue|normalizedQuery|Error|buffer|AI|400|transcript|requestAbortController|500|prefetchCache|pendingPrefetchRequests|return|modelSelectionDropdown|firstResponseTextTimestamp|trim|push|async|abortController|signal|ok|reader|playNextAudio|innerHTML|xmlns|http|www|w3|org|2000|height|viewBox|fill|none|currentColor|linecap|linejoin|M12|1a3|3v8a3|0V4a3|M19|10v2a7|0v|cachedData|fullResponseText|unsentTextChunk|indicator|rounded|full|px|py|white|||flex|items|center|transition|colors|duration|hover|700|prefetchQueue|audioCache|normalizeQueryText|try|content|throw|catch|generateTextToSpeechAudio|resolve|start|Listening|log|textChunk|className|interimTranscript|activeQuery|queryStartTime|prefetchTextQuery|history|has|stream|not|timestamp|AbortError|finally|delete|responseStream|abortSignal|decoder|done|aborted|chunk|for|in|isPrefetched|sentText|interruptAudioPlayback|blue|600|dark|purple|green|yellow|voiceSelectionDropdown|responseTimeDisplay|userActivityIndicator|transcriptDiv|lastUserSpeechTimestamp|Map|replace|generateCacheKey|JSON|stringify|processPrefetchQueue|while|set|fetch|encodeURIComponent|method|GET|Network|was|firstAudioUrl|handleStreamingResponseForPrefetch|break|startsWith|M9|9h6v6h|6z|Stop|sendQueryToAI|handleStreamingResponse|initialChunksSent|addToConversationHistory|Speech|results|processSpeechTranscript|role|trimmedTranscript|reason|TEXT_TO_SPEECH_API_ENDPOINT|CHUNK_SIZE|MAX_PREFETCH_REQUESTS|prefetchCacheExpiration||audioCacheExpiration|modelName|prefetchFirstAudioChunk|shift|AbortController|stream_audio|model|headers|accept|type|application|json|body|name|getReader|TextDecoder|utf|read|DOMException|Request|decode|split|data|substring|releaseLock|audioData|stop|Interrupt|audioPromise|Promise|onerror|pause|currentTime|setTimeout|normalizedText|get|audioBlob|prefetchedAudioUrl|status|user|assistant|state|User|Speaking|Idle|800|webkitSpeechRecognition|recognition|cancelPrefetchRequests|abort|updateLatency|latency|Latency|voiceSelect|modelSelect|noiseSuppressionCheckbox|noiseSuppression|responseTime|userIndicator|aiIndicator|partialTranscript|USER_SPEECH_INTERRUPT_DELAY|https|api|streamelements|com|kappa|v2|speech|60000|3600000|toLowerCase|size|prefetching|Audio|onended|play|100|blob|URL|createObjectURL|generating|TTS|Sending|429|Rate|limit|hit|retrying|second|1000|Streaming|received|sending|is|speaking|Processing|window|Object|assign|continuous|interimResults|language|en|US|maxAlternatives|onstart|started|onresult|resultIndex|isFinal|final|onend|addEventListener|click|Start|button|interrupt|alert|Your|browser|does|support|the|Web|API|pop|splice|trimStart|unknown|Interrupting|clear|of|ms|0ms|setInterval|200'.split('|'),0,{}))
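The blob removed above is the previous version of this script, compressed with a Dean Edwards-style p,a,c,k,e,r eval wrapper (the readable source is re-added below). If you ever need to audit such a payload, a minimal sketch is to evaluate only the packer expression, which returns the original source as a string, rather than eval-ing the result; here `packedSource` is a hypothetical variable holding everything between `eval(` and the final `)`:

    // Hedged sketch: decode a p,a,c,k,e,r blob without executing the payload.
    // The packer's inner IIFE returns the reconstructed source as a plain string.
    const unpack = (packedSource) => Function(`return (${packedSource});`)();
    // console.log(unpack(packedSource)); // prints the readable script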
1
+ const startStopButton = document.getElementById('startStopButton');
2
+ const voiceSelectionDropdown = document.getElementById('voiceSelect');
3
+ const modelSelectionDropdown = document.getElementById('modelSelect');
4
+ const noiseSuppressionCheckbox = document.getElementById('noiseSuppression');
5
+ const responseTimeDisplay = document.getElementById('responseTime');
6
+ const userActivityIndicator = document.getElementById('userIndicator');
7
+ const aiActivityIndicator = document.getElementById('aiIndicator');
8
+ const transcriptDiv = document.getElementById('transcript');
9
+
10
+ let speechRecognizer;
11
+ let activeQuery = null;
12
+ let queryStartTime = 0;
13
+ let completeTranscript = '';
14
+ let isRequestInProgress = false;
15
+ let isUserSpeaking = false;
16
+ let isSpeechRecognitionActive = false;
17
+ let requestAbortController = null;
18
+ let partialTranscript = '';
19
+ let lastUserSpeechTimestamp = null;
20
+ let prefetchTextQuery = "";
21
+ let firstResponseTextTimestamp = null;
22
+
23
+ // Configuration
24
+ const USER_SPEECH_INTERRUPT_DELAY = 500;
25
+ const TEXT_TO_SPEECH_API_ENDPOINT = "https://api.streamelements.com/kappa/v2/speech";
26
+ const CHUNK_SIZE = 300;
27
+
28
+ // Audio Management
29
+ let currentAudio = null;
30
+ let audioPlaybackQueue = [];
31
+ let prefetchQueue = [];
32
+
33
+ // Enhanced Prefetching and Caching
34
+ const prefetchCache = new Map();
35
+ const pendingPrefetchRequests = new Map();
36
+ const MAX_PREFETCH_REQUESTS = 10;
37
+ const prefetchCacheExpiration = 60000; // 1 minute
38
+
39
+ // Global Conversation History
40
+ let conversationHistory = [];
41
+
42
+ // Audio Caching
43
+ const audioCache = new Map();
44
+ const audioCacheExpiration = 3600000; // 1 hour
45
+
46
+ // Normalize query text
47
+ const normalizeQueryText = query => query.trim().toLowerCase().replace(/[^\w\s]/g, '');
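+ // e.g. normalizeQueryText("  What's the weather?! ") === "whats the weather"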
48
+
49
+ // Generate a cache key
50
+ const generateCacheKey = (normalizedQuery, voice, history, modelName) =>
51
+ `${normalizedQuery}-${voice}-${JSON.stringify(history)}-${modelName}`;
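+ // e.g. "whats the weather-Brian-[]-llama3-8b" (any history change busts the key)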
52
+
53
+ // Prefetch and cache the first TTS audio chunk
54
+ const prefetchFirstAudioChunk = (query, voice) => {
55
+ const normalizedQuery = normalizeQueryText(query);
56
+ const cacheKey = generateCacheKey(normalizedQuery, voice, conversationHistory, modelSelectionDropdown.value);
57
+
58
+ if (pendingPrefetchRequests.has(cacheKey) || prefetchCache.has(cacheKey)) return;
59
+
60
+ prefetchQueue.push({ query: query.trim(), voice, cacheKey });
61
+ processPrefetchQueue();
62
+ };
63
+
64
+ // Process the prefetch queue
65
+ const processPrefetchQueue = async () => {
66
+ while (prefetchQueue.length > 0 && pendingPrefetchRequests.size < MAX_PREFETCH_REQUESTS) {
67
+ const { query, voice, cacheKey } = prefetchQueue.shift();
68
+ const abortController = new AbortController();
69
+ pendingPrefetchRequests.set(cacheKey, abortController);
70
+
71
+ const url = '/stream_text';
72
+ const requestBody = {
73
+ query: query,
74
+ history: JSON.stringify(conversationHistory),
75
+ model: modelSelectionDropdown.value
76
+ };
77
+
78
+ try {
79
+ const response = await fetch(url, {
80
+ method: 'POST',
81
+ headers: {
82
+ 'Accept': 'text/event-stream',
83
+ 'Content-Type': 'application/json'
84
+ },
85
+ body: JSON.stringify(requestBody),
86
+ signal: abortController.signal
87
+ });
88
+
89
+ if (!response.ok) throw new Error('Network response was not ok');
90
+
91
+ const firstAudioUrl = await handleStreamingResponseForPrefetch(response.body, voice, abortController.signal);
92
+
93
+ if (firstAudioUrl) prefetchCache.set(cacheKey, { url: firstAudioUrl, timestamp: Date.now() });
94
+
95
+ } catch (error) {
96
+ if (error.name !== 'AbortError') console.error("Error prefetching audio:", error);
97
+ } finally {
98
+ pendingPrefetchRequests.delete(cacheKey);
99
+ processPrefetchQueue();
100
+ }
101
+ }
102
+ };
103
+
104
+ // Handle the streaming response for prefetching
105
+ const handleStreamingResponseForPrefetch = async (responseStream, voice, abortSignal) => {
106
+ const reader = responseStream.getReader();
107
+ const decoder = new TextDecoder("utf-8");
108
+ let buffer = "";
109
+
110
+ try {
111
+ while (true) {
112
+ const { done, value } = await reader.read();
113
+ if (done) break;
114
+ if (abortSignal.aborted) throw new DOMException('Request aborted', 'AbortError');
115
+
116
+ const chunk = decoder.decode(value, { stream: true });
117
+ buffer += chunk;
118
+ const lines = buffer.split('\n');
119
+
120
+ for (let i = 0; i < lines.length - 1; i++) {
121
+ const line = lines[i];
122
+ if (line.startsWith('data: ')) {
123
+ const textContent = line.substring(6).trim();
124
+ if (textContent) {
125
+ const audioUrl = await generateTextToSpeechAudio(textContent, voice);
126
+ return audioUrl;
127
+ }
128
+ }
129
+ }
130
+
131
+ buffer = lines[lines.length - 1];
132
+ }
133
+ } catch (error) {
134
+ console.error("Error in handleStreamingResponseForPrefetch:", error);
135
+ } finally {
136
+ reader.releaseLock();
137
+ }
138
+
139
+ return null;
140
+ };
141
+
142
+ // Play audio from the queue
143
+ const playNextAudio = async () => {
144
+ if (audioPlaybackQueue.length > 0) {
145
+ const audioData = audioPlaybackQueue.shift();
146
+ const audio = new Audio(audioData.url);
147
+ updateActivityIndicators();
148
+
149
+ // Pause speech recognition if it's active
150
+ if (isSpeechRecognitionActive) {
151
+ speechRecognizer.stop();
152
+ isSpeechRecognitionActive = false;
153
+ startStopButton.innerHTML = `
154
+ <svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round">
155
+ <path d="M12 1a3 3 0 0 0-3 3v8a3 3 0 0 0 6 0V4a3 3 0 0 0-3-3z"></path>
156
+ <path d="M19 10v2a7 7 0 0 1-14 0v-2"></path>
157
+ <line x1="12" y1="19" x2="12" y2="23"></line>
158
+ <line x1="8" y1="23" x2="16" y2="23"></line>
159
+ </svg>
160
+ Interrupt AI
161
+ `;
162
+ }
163
+
164
+ const audioPromise = new Promise(resolve => {
165
+ audio.onended = resolve;
166
+ audio.onerror = resolve;
167
+ });
168
+ if (currentAudio) {
169
+ currentAudio.pause();
170
+ currentAudio.currentTime = 0;
171
+ }
172
+
173
+ currentAudio = audio;
174
+ await audio.play();
175
+ await audioPromise;
176
+ playNextAudio();
177
+ } else {
178
+ updateActivityIndicators();
179
+
180
+ // Resume speech recognition if it was paused with a delay
181
+ setTimeout(() => {
182
+ if (!isSpeechRecognitionActive) {
183
+ speechRecognizer.start();
184
+ isSpeechRecognitionActive = true;
185
+ startStopButton.innerHTML = `
186
+ <svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round">
187
+ <path d="M9 9h6v6h-6z"></path>
188
+ <path d="M12 1a3 3 0 0 0-3 3v8a3 3 0 0 0 6 0V4a3 3 0 0 0-3-3z"></path>
189
+ <path d="M19 10v2a7 7 0 0 1-14 0v-2"></path>
190
+ <line x1="12" y1="19" x2="12" y2="23"></line>
191
+ <line x1="8" y1="23" x2="16" y2="23"></line>
192
+ </svg>
193
+ Stop Listening
194
+ `;
195
+ }
196
+ }, 100);
197
+ }
198
+ };
199
+
200
+ // Generate Text-to-Speech audio with caching
201
+ const generateTextToSpeechAudio = async (text, voice) => {
202
+ const normalizedText = normalizeQueryText(text);
203
+ const cacheKey = `${normalizedText}-${voice}`;
204
+
205
+ if (audioCache.has(cacheKey)) {
206
+ const cachedData = audioCache.get(cacheKey);
207
+ if (Date.now() - cachedData.timestamp < audioCacheExpiration) {
208
+ return cachedData.url;
209
+ } else {
210
+ audioCache.delete(cacheKey);
211
+ }
212
+ }
213
+
214
+ try {
215
+ const response = await fetch(`${TEXT_TO_SPEECH_API_ENDPOINT}?voice=${voice}&text=${encodeURIComponent(text)}`, { method: 'GET' });
216
+ if (!response.ok) throw new Error('Network response was not ok');
217
+ const audioBlob = await response.blob();
218
+ const audioUrl = URL.createObjectURL(audioBlob);
219
+
220
+ audioCache.set(cacheKey, { url: audioUrl, timestamp: Date.now() });
221
+ return audioUrl;
222
+ } catch (error) {
223
+ console.error("Error generating TTS audio:", error);
224
+ return null;
225
+ }
226
+ };
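Note: every successful call above mints a blob object URL that is never revoked, so long sessions accumulate blobs until the page unloads. A hedged cleanup sketch, safe only if cached URLs are not replayed out of audioCache afterwards (a revoked URL cannot be played again):

    // e.g. in playNextAudio, once playback has settled:
    // URL.revokeObjectURL(audio.src); // frees the blob backing this utterance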
227
+
228
+ // Send a query to the AI
229
+ const sendQueryToAI = async (query) => {
230
+ console.log("Sending query to AI:", query);
231
+ isRequestInProgress = true;
232
+ updateActivityIndicators();
233
+ queryStartTime = Date.now();
234
+ firstResponseTextTimestamp = null;
235
+
236
+ const normalizedQuery = normalizeQueryText(query);
237
+ const cacheKey = generateCacheKey(normalizedQuery, voiceSelectionDropdown.value, conversationHistory, modelSelectionDropdown.value); // voice must match the key built in prefetchFirstAudioChunk, or prefetched audio is never reused
238
+
239
+ if (prefetchCache.has(cacheKey)) {
240
+ const cachedData = prefetchCache.get(cacheKey);
241
+ if (Date.now() - cachedData.timestamp < prefetchCacheExpiration) {
242
+ const prefetchedAudioUrl = cachedData.url;
243
+ audioPlaybackQueue.push({ url: prefetchedAudioUrl, isPrefetched: true });
244
+ playNextAudio();
245
+ } else {
246
+ prefetchCache.delete(cacheKey);
247
+ }
248
+ }
249
+
250
+ requestAbortController = new AbortController();
251
+
252
+ const url = '/stream_text';
253
+ const requestBody = {
254
+ query: query,
255
+ history: JSON.stringify(conversationHistory),
256
+ model: modelSelectionDropdown.value
257
+ };
258
+
259
+ try {
260
+ const response = await fetch(url, {
261
+ method: 'POST',
262
+ headers: {
263
+ 'Accept': 'text/event-stream',
264
+ 'Content-Type': 'application/json'
265
+ },
266
+ body: JSON.stringify(requestBody),
267
+ signal: requestAbortController.signal
268
+ });
269
+
270
+ if (!response.ok) {
271
+ if (response.status === 429) {
272
+ console.log("Rate limit hit, retrying in 1 second...");
273
+ await new Promise(resolve => setTimeout(resolve, 1000));
274
+ await sendQueryToAI(query);
275
+ return;
276
+ }
277
+ throw new Error(`Network response was not ok: ${response.status}`);
278
+ }
279
+
280
+ console.log("Streaming audio response received");
281
+ await handleStreamingResponse(response.body, voiceSelectionDropdown.value, requestAbortController.signal);
282
+ } catch (error) {
283
+ if (error.name !== 'AbortError') {
284
+ console.error("Error sending query to AI:", error);
285
+ }
286
+ } finally {
287
+ isRequestInProgress = false;
288
+ updateActivityIndicators();
289
+ }
290
+ };
291
+
292
+ // Handle the streaming audio response
293
+ const handleStreamingResponse = async (responseStream, voice, abortSignal) => {
294
+ const reader = responseStream.getReader();
295
+ const decoder = new TextDecoder("utf-8");
296
+ let buffer = "";
297
+ let initialChunksSent = 0;
298
+ let fullResponseText = "";
299
+ let textChunk = "";
300
+ let sentText = "";
301
+
302
+ try {
303
+ while (true) {
304
+ const { done, value } = await reader.read();
305
+ if (done) break;
306
+ if (abortSignal.aborted) throw new DOMException('Request aborted', 'AbortError');
307
+
308
+ if (isUserSpeaking) {
309
+ interruptAudioPlayback('user is speaking');
310
+ break;
311
+ }
312
+
313
+ const chunk = decoder.decode(value, { stream: true });
314
+ buffer += chunk;
315
+ const lines = buffer.split('\n');
316
+
317
+ for (let i = 0; i < lines.length - 1; i++) {
318
+ const line = lines[i];
319
+ if (line.startsWith('data: ')) {
320
+ const textContent = line.substring(6).trim();
321
+ if (textContent) {
322
+ if (!firstResponseTextTimestamp) firstResponseTextTimestamp = Date.now();
323
+
324
+ fullResponseText += textContent + " ";
325
+ textChunk += textContent + " ";
326
+ transcriptDiv.textContent = fullResponseText; // Update transcriptDiv
327
+
328
+ if (initialChunksSent < 2) {
329
+ const audioUrl = await generateTextToSpeechAudio(textContent, voice);
330
+ if (audioUrl) {
331
+ audioPlaybackQueue.push({ url: audioUrl, isPrefetched: false });
332
+ if (!currentAudio) playNextAudio();
333
+ }
334
+ sentText += textContent + " ";
335
+ initialChunksSent++;
336
+ } else {
337
+ let unsentTextChunk = textChunk.replace(sentText, '').trim();
338
+
339
+ if (unsentTextChunk.length >= CHUNK_SIZE) {
340
+ const audioUrl = await generateTextToSpeechAudio(unsentTextChunk, voice);
341
+ if (audioUrl) {
342
+ audioPlaybackQueue.push({ url: audioUrl, isPrefetched: false });
343
+ if (!currentAudio) playNextAudio();
344
+ }
345
+ textChunk = "";
346
+ }
347
+ }
348
+ }
349
+ }
350
+ }
351
+
352
+ buffer = lines[lines.length - 1];
353
+ }
354
+ } catch (error) {
355
+ console.error("Error in handleStreamingResponse:", error);
356
+ } finally {
357
+ reader.releaseLock();
358
+
359
+ let unsentTextChunk = textChunk.replace(sentText, '').trim();
360
+ if (unsentTextChunk !== "") {
361
+ const audioUrl = await generateTextToSpeechAudio(unsentTextChunk, voice);
362
+ if (audioUrl) {
363
+ audioPlaybackQueue.push({ url: audioUrl, isPrefetched: false });
364
+ if (!currentAudio) playNextAudio();
365
+ }
366
+ }
367
+
368
+ if (fullResponseText !== '') {
369
+ addToConversationHistory('assistant', fullResponseText);
370
+ fullResponseText = ''; // Clear fullResponseText for the next response
371
+ }
372
+ }
373
+ };
374
+
375
+ // Update activity indicators
376
+ const updateActivityIndicators = (state = null) => {
377
+ userActivityIndicator.textContent = isUserSpeaking ? "User: Speaking" : "User: Idle";
378
+ userActivityIndicator.className = isUserSpeaking
379
+ ? "indicator rounded-full px-4 py-2 text-white flex items-center transition-colors duration-300 bg-gradient-to-r from-blue-400 to-blue-600 hover:bg-gradient-to-r from-blue-500 to-blue-700"
380
+ : "indicator rounded-full px-4 py-2 text-white flex items-center transition-colors duration-300 bg-gradient-to-r from-gray-300 to-gray-400 dark:from-gray-700 dark:to-gray-800 hover:bg-gradient-to-r from-gray-400 to-gray-500"; // Tailwind classes
381
+
382
+ if (isRequestInProgress && !currentAudio) {
383
+ aiActivityIndicator.textContent = "AI: Processing...";
384
+ aiActivityIndicator.className = "indicator rounded-full px-4 py-2 text-white flex items-center transition-colors duration-300 bg-gradient-to-r from-purple-400 to-purple-600 hover:bg-gradient-to-r from-purple-500 to-purple-700"; // Tailwind class for thinking
385
+ } else if (currentAudio && !isUserSpeaking) {
386
+ aiActivityIndicator.textContent = state || "AI: Speaking";
387
+ aiActivityIndicator.className = "indicator rounded-full px-4 py-2 text-white flex items-center transition-colors duration-300 bg-gradient-to-r from-green-400 to-green-600 hover:bg-gradient-to-r from-green-500 to-green-700"; // Tailwind class for speaking
388
+ } else if (isUserSpeaking) {
389
+ aiActivityIndicator.textContent = "AI: Listening";
390
+ aiActivityIndicator.className = "indicator rounded-full px-4 py-2 text-white flex items-center transition-colors duration-300 bg-gradient-to-r from-yellow-400 to-yellow-600 hover:bg-gradient-to-r from-yellow-500 to-yellow-700"; // Tailwind class for listening
391
+ } else {
392
+ aiActivityIndicator.textContent = "AI: Idle";
393
+ aiActivityIndicator.className = "indicator rounded-full px-4 py-2 text-white flex items-center transition-colors duration-300 bg-gradient-to-r from-gray-300 to-gray-400 dark:from-gray-700 dark:to-gray-800 hover:bg-gradient-to-r from-gray-400 to-gray-500"; // Tailwind classes
394
+ }
395
+ };
396
+
397
+ // Initialize speech recognition
398
+ if ('webkitSpeechRecognition' in window) {
399
+ speechRecognizer = new webkitSpeechRecognition();
400
+ Object.assign(speechRecognizer, {
401
+ continuous: true,
402
+ interimResults: true,
403
+ language: 'en-US',
404
+ maxAlternatives: 3
405
+ });
406
+
407
+ speechRecognizer.onstart = () => {
408
+ console.log("Speech recognition started");
409
+ completeTranscript = '';
410
+ isUserSpeaking = true;
411
+ lastUserSpeechTimestamp = Date.now();
412
+ updateActivityIndicators();
413
+ startStopButton.innerHTML = `
414
+ <svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round">
415
+ <path d="M9 9h6v6h-6z"></path>
416
+ <path d="M12 1a3 3 0 0 0-3 3v8a3 3 0 0 0 6 0V4a3 3 0 0 0-3-3z"></path>
417
+ <path d="M19 10v2a7 7 0 0 1-14 0v-2"></path>
418
+ <line x1="12" y1="19" x2="12" y2="23"></line>
419
+ <line x1="8" y1="23" x2="16" y2="23"></line>
420
+ </svg>
421
+ Stop Listening
422
+ `;
423
+ };
424
+
425
+ speechRecognizer.onresult = (event) => {
426
+ let interimTranscript = '';
427
+ for (let i = event.resultIndex; i < event.results.length; i++) {
428
+ const transcript = event.results[i][0].transcript;
429
+ if (event.results[i].isFinal) {
430
+ completeTranscript += transcript;
431
+ interruptAudioPlayback('final');
432
+ processSpeechTranscript(completeTranscript);
433
+ completeTranscript = '';
434
+ isUserSpeaking = false;
435
+ updateActivityIndicators();
436
+ queryStartTime = Date.now();
437
+ } else {
438
+ interimTranscript += transcript;
439
+ isUserSpeaking = true;
440
+ lastUserSpeechTimestamp = Date.now();
441
+ updateActivityIndicators();
442
+
443
+ if (interimTranscript.length > prefetchTextQuery.length + 5) {
444
+ cancelPrefetchRequests(prefetchTextQuery);
445
+ }
446
+ prefetchTextQuery = interimTranscript;
447
+ prefetchFirstAudioChunk(interimTranscript, voiceSelectionDropdown.value);
448
+ }
449
+ }
450
+ };
451
+
452
+ speechRecognizer.onerror = (event) => {
453
+ console.error('Speech recognition error:', event.error);
454
+ if (isSpeechRecognitionActive) speechRecognizer.start();
455
+ };
456
+
457
+ speechRecognizer.onend = () => {
458
+ isUserSpeaking = false;
459
+ updateActivityIndicators();
460
+
461
+ if (!isRequestInProgress && completeTranscript !== '') {
462
+ processSpeechTranscript(completeTranscript);
463
+ completeTranscript = '';
464
+ }
465
+
466
+ if (isSpeechRecognitionActive) speechRecognizer.start();
467
+ };
468
+
469
+ startStopButton.addEventListener('click', () => {
470
+ if (isSpeechRecognitionActive && !isRequestInProgress) { // Stop Listening
471
+ speechRecognizer.stop();
472
+ isSpeechRecognitionActive = false;
473
+ startStopButton.innerHTML = `
474
+ <svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round">
475
+ <path d="M12 1a3 3 0 0 0-3 3v8a3 3 0 0 0 6 0V4a3 3 0 0 0-3-3z"></path>
476
+ <path d="M19 10v2a7 7 0 0 1-14 0v-2"></path>
477
+ <line x1="12" y1="19" x2="12" y2="23"></line>
478
+ <line x1="8" y1="23" x2="16" y2="23"></line>
479
+ </svg>
480
+ Start Listening
481
+ `;
482
+ } else if ((isSpeechRecognitionActive && isRequestInProgress) || currentAudio) { // Interrupt AI
483
+ interruptAudioPlayback('button interrupt');
484
+ speechRecognizer.start();
485
+ isSpeechRecognitionActive = true; // Keep recognition active
486
+ startStopButton.innerHTML = `
487
+ <svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round">
488
+ <path d="M12 1a3 3 0 0 0-3 3v8a3 3 0 0 0 6 0V4a3 3 0 0 0-3-3z"></path>
489
+ <path d="M19 10v2a7 7 0 0 1-14 0v-2"></path>
490
+ <line x1="12" y1="19" x2="12" y2="23"></line>
491
+ <line x1="8" y1="23" x2="16" y2="23"></line>
492
+ </svg>
493
+ Interrupt AI
494
+ `;
495
+ } else { // Start Listening
496
+ speechRecognizer.start();
497
+ isSpeechRecognitionActive = true;
498
+ startStopButton.innerHTML = `
499
+ <svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round">
500
+ <path d="M9 9h6v6h-6z"></path>
501
+ <path d="M12 1a3 3 0 0 0-3 3v8a3 3 0 0 0 6 0V4a3 3 0 0 0-3-3z"></path>
502
+ <path d="M19 10v2a7 7 0 0 1-14 0v-2"></path>
503
+ <line x1="12" y1="19" x2="12" y2="23"></line>
504
+ <line x1="8" y1="23" x2="16" y2="23"></line>
505
+ </svg>
506
+ Stop Listening
507
+ `;
508
+ }
509
+ });
510
+ } else {
511
+ alert('Your browser does not support the Web Speech API.');
512
+ }
513
+
514
+ // Add to conversation history
515
+ const addToConversationHistory = (role, content) => {
516
+ if (conversationHistory.length > 0 &&
517
+ conversationHistory[conversationHistory.length - 1].role === 'assistant' &&
518
+ conversationHistory[conversationHistory.length - 1].content === "") {
519
+ conversationHistory.pop();
520
+ }
521
+
522
+ conversationHistory.push({ role, content });
523
+
524
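+ // drop the oldest user/assistant pair once the log exceeds six entries (~3 exchanges kept)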
+ if (conversationHistory.length > 6) conversationHistory.splice(0, 2);
525
+ };
526
+
527
+ // Process the final speech transcript
528
+ const processSpeechTranscript = (transcript) => {
529
+ const trimmedTranscript = transcript.trimStart();
530
+ if (trimmedTranscript !== '' && !isRequestInProgress) {
531
+ activeQuery = trimmedTranscript;
532
+ sendQueryToAI(activeQuery);
533
+ addToConversationHistory('user', activeQuery);
534
+ transcriptDiv.textContent = '';
535
+ }
536
+ };
537
+
538
+ // Interrupt audio playback
539
+ const interruptAudioPlayback = (reason = 'unknown') => {
540
+ console.log(`Interrupting audio (reason: ${reason})...`);
541
+ if (currentAudio) {
542
+ currentAudio.pause();
543
+ currentAudio.currentTime = 0;
544
+ currentAudio = null;
545
+ }
546
+
547
+ audioPlaybackQueue.length = 0;
548
+ isRequestInProgress = false;
549
+
550
+ if (requestAbortController) {
551
+ requestAbortController.abort();
552
+ requestAbortController = null;
553
+ }
554
+
555
+ prefetchCache.clear();
556
+ prefetchQueue.length = 0;
557
+ updateActivityIndicators();
558
+ };
559
+
560
+ // Cancel pending prefetch requests
561
+ const cancelPrefetchRequests = (query) => {
562
+ const normalizedQuery = normalizeQueryText(query);
563
+
564
+ for (const [cacheKey, abortController] of pendingPrefetchRequests) {
565
+ if (cacheKey.startsWith(normalizedQuery)) {
566
+ abortController.abort();
567
+ pendingPrefetchRequests.delete(cacheKey);
568
+ }
569
+ }
570
+ };
571
+
572
+ // Update latency display
573
+ const updateLatency = () => {
574
+ if (firstResponseTextTimestamp) {
575
+ const latency = firstResponseTextTimestamp - queryStartTime;
576
+ responseTimeDisplay.textContent = `Latency: ${latency}ms`;
577
+ } else {
578
+ responseTimeDisplay.textContent = "Latency: 0ms";
579
+ }
580
+ };
581
+
582
+ setInterval(updateLatency, 200);
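The rewritten client above talks to the new FastAPI backend via a JSON POST to /stream_text. For quick verification outside the UI, a hedged console sketch (assumptions: the page is served by the same app so the relative route resolves, and "llama3-8b" is an accepted model id, matching the backend's defaults):

    // Paste into the DevTools console: POST a query and dump the raw SSE lines.
    const res = await fetch('/stream_text', {
      method: 'POST',
      headers: { 'Accept': 'text/event-stream', 'Content-Type': 'application/json' },
      body: JSON.stringify({ query: 'Hello!', history: '[]', model: 'llama3-8b' })
    });
    const reader = res.body.getReader();
    const dec = new TextDecoder('utf-8');
    for (let chunk; !(chunk = await reader.read()).done; ) {
      console.log(dec.decode(chunk.value, { stream: true })); // "data: ..." frames
    }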
secure.js DELETED
@@ -1 +0,0 @@
1
- (function(a,b){"object"==typeof exports&&"undefined"!=typeof module?module.exports=b():"function"==typeof define&&define.amd?define(b):(a="undefined"==typeof globalThis?a||self:globalThis,a.DisableDevtool=b())})(this,function(){"use strict";function a(){if(p.redirectUrl)window.location.href=p.redirectUrl;else if(p.replacementHTML)try{document.documentElement.innerHTML=p.replacementHTML}catch(a){document.documentElement.innerText=p.replacementHTML}else{try{window.opener=null,window.open("","_self"),window.close(),window.history.back()}catch(a){console.log(a)}setTimeout(function(){window.location.href=p.timeoutRedirectUrl||`https://theajack.github.io/disable-devtool/404.html?h=${encodeURIComponent(location.host)}`},500)}}function b(a={}){for(const b in p)void 0!==a[b]&&(s(p[b])===s(a[b])||-1!==q.indexOf(b))&&(p[b]=a[b]);"function"==typeof p.onDevToolClose&&!0===p.stopIntervalOnOpen&&(p.stopIntervalOnOpen=!1,console.warn("stopIntervalOnOpen is invalid when using onDevToolClose"))}function c(){const a=navigator.userAgent.toLowerCase(),b="number"==typeof navigator.maxTouchPoints?1<navigator.maxTouchPoints:/android|iphone|ipad|ipod|arch/i.test(navigator.platform.toLowerCase())||/(iphone|ipad|ipod|ios|android)/i.test(a),c=!!window.top&&window!==window.top,d=a.includes("qqbrowser"),e=a.includes("firefox"),f=a.includes("macintosh"),g=a.includes("edge"),h=g&&!a.includes("chrome"),i=h||a.includes("trident")||a.includes("msie"),j=a.includes("crios"),k=a.includes("edgios"),l=a.includes("chrome")||j,m=!b&&/(googlebot|baiduspider|bingbot|applebot|petalbot|yandexbot|bytespider|chrome\-lighthouse|moto g power)/i.test(a);Object.assign(r,{isWithinIframe:c,isPC:!b,isQQBrowser:d,isFirefox:e,isMacOS:f,isEdge:g,isLegacyEdge:h,isIE:i,isIOSChrome:j,isIOSEdge:k,isChrome:l,isSearchBot:m,isMobile:b})}function d(){const a={};for(let b=0;500>b;b++)a[`${b}`]=`${b}`;const b=[];for(let c=0;50>c;c++)b.push(a);return b}function e(){p.clearConsoleOnInit&&x()}function f(a){z[a]=!1}function g(){for(const a in z)if(z[a])return y=!0;return y=!1}function h(){const a=p.ignoreUrlPatterns;if(a){if("function"==typeof a)return a();if(0!==a.length){const b=location.href;if(A===b)return B;A=b;for(const c of a)if("string"==typeof c&&b.includes(c)||c instanceof RegExp&&c.test(b))return B=!0}}return B=!1}function i(a){const b=r.isMacOS,c=b?a=>a.metaKey&&a.altKey&&(73===a.keyCode||105===a.keyCode):a=>a.ctrlKey&&a.shiftKey&&(73===a.keyCode||105===a.keyCode),d=b?a=>a.metaKey&&a.altKey&&85===a.keyCode||a.metaKey&&83===a.keyCode:a=>a.ctrlKey&&(83===a.keyCode||85===a.keyCode);a.addEventListener("keydown",function(b){const e=b.keyCode||b.which;if(123===e||c(b)||d(b))return l(a,b)},!0),p.disableRightClick&&a.addEventListener("contextmenu",function(b){if("touch"!==b.pointerType)return l(a,b)},!1),p.disableTextSelect&&j(a,"selectstart"),p.disableCopy&&j(a,"copy"),p.disableCut&&j(a,"cut"),p.disablePaste&&j(a,"paste")}function j(a,b){a.addEventListener(b,k)}function k(a){if(!h()&&!K.isDetectionSuspended)return a.preventDefault(),!1}function l(a,b){if(!h()&&!K.isDetectionSuspended)return(b=b||a.event).returnValue=!1,b.preventDefault(),!1}function m(){H=setInterval(()=>{if(!(K.isDetectionSuspended||h())){for(const a of F)f(a.type),a.detect(J++);if(e(),"function"==typeof p.onDevToolClose){const a=y;!g()&&a&&p.onDevToolClose()}}},p.checkInterval)}function n(){clearInterval(H)}function o(a){let b=0;for(let c=0;c<a.length;c++)b=(b<<5)-b+a.charCodeAt(c),b|=0;return b.toString(16)}const 
p={onDevToolOpen:a,onDevToolClose:null,redirectUrl:"",timeoutRedirectUrl:"",urlTokenParam:"ddtk",checkInterval:500,disableRightClick:!0,stopCheckAfter:5e3,stopIntervalOnOpen:!1,enabledDetectors:[0,1,3,4,5,6,7],clearConsoleOnInit:!0,disableTextSelect:!1,disableCopy:!1,disableCut:!1,disablePaste:!1,ignoreUrlPatterns:null,disableDevToolsInParentFrames:!1,detectSearchEngineBots:!1,replacementHTML:""},q=["enabledDetectors","onDevToolClose","ignoreUrlPatterns"],r={isWithinIframe:!1,isPC:!1,isQQBrowser:!1,isFirefox:!1,isMacOS:!1,isEdge:!1,isLegacyEdge:!1,isIE:!1,isIOSChrome:!1,isIOSEdge:!1,isChrome:!1,isSearchBot:!1,isMobile:!1},s=a=>"object"==typeof a&&null!==a,t=()=>new Date().getTime(),u=a=>{const b=t();return a(),t()-b};let v=console.log,w=console.table,x=console.clear;r.isIE&&(v=(...a)=>console.log(...a),w=(...a)=>console.table(...a),x=()=>console.clear());let y=!1;const z={};let A="",B=!1;const C={RegexToString:0,DefinePropertyID:1,WindowSize:2,DateToString:3,FunctionToString:4,DebuggerStatement:5,Performance:6,ExternalLibrary:7};class D{constructor({type:a,isEnabled:b=!0}){this.type=C.Unknown,this.isEnabled=!0,this.type=a,this.isEnabled=b,this.isEnabled&&(F.push(this),this.init())}onDevToolOpen(){console.warn(`DevTool usage is prohibited! [Detector: ${this.type}]`),p.stopIntervalOnOpen&&n(),clearTimeout(I),p.onDevToolOpen(this.type,a),z[this.type]=!0}init(){}detect(){}}class E extends D{constructor(){super({type:C.ExternalLibrary})}init(){}detect(){(window.eruda&&window.eruda._devTools&&window.eruda._devTools._isShow||window._vcOrigConsole&&window.document.querySelector("#__vconsole.vc-toggle"))&&this.onDevToolOpen()}static isExternalLibraryInUse(){return!!window.eruda||!!window._vcOrigConsole}}const F=[],G={[C.RegexToString]:class a extends D{constructor(){super({type:C.RegexToString,isEnabled:r.isQQBrowser||r.isFirefox})}init(){this.lastDetection=0,this.regex=/./,v(this.regex),this.regex.toString=()=>{if(r.isQQBrowser){const a=t();this.lastDetection&&100>a-this.lastDetection?this.onDevToolOpen():this.lastDetection=a}else r.isFirefox&&this.onDevToolOpen();return""}}detect(){v(this.regex)}},[C.DefinePropertyID]:class a extends D{constructor(){super({type:C.DefinePropertyID})}init(){this.div=document.createElement("div"),this.div.__defineGetter__("id",()=>this.onDevToolOpen()),Object.defineProperty(this.div,"id",{get:()=>this.onDevToolOpen()})}detect(){v(this.div)}},[C.WindowSize]:class a extends D{constructor(){super({type:C.WindowSize,isEnabled:!r.isWithinIframe&&!r.isEdge})}init(){this.checkWindowSize(),window.addEventListener("resize",()=>{setTimeout(()=>this.checkWindowSize(),100)},!0)}detect(){}checkWindowSize(){const a=window.devicePixelRatio||window.screen&&window.screen.deviceXDPI&&window.screen.logicalXDPI&&window.screen.deviceXDPI/window.screen.logicalXDPI;if(!1!==a){const b=200<window.outerWidth-window.innerWidth*a,c=300<window.outerHeight-window.innerHeight*a;if(b||c)return this.onDevToolOpen(),!1;f(this.type)}return!0}},[C.DateToString]:class a extends D{constructor(){super({type:C.DateToString,isEnabled:!r.isIOSChrome&&!r.isIOSEdge})}init(){this.modifyCount=0,this.date=new Date,this.date.toString=()=>(this.modifyCount++,"")}detect(){this.modifyCount=0,v(this.date),e(),2<=this.modifyCount&&this.onDevToolOpen()}},[C.FunctionToString]:class a extends 
D{constructor(){super({type:C.FunctionToString,isEnabled:!r.isIOSChrome&&!r.isIOSEdge})}init(){this.modifyCount=0,this.func=function(){},this.func.toString=()=>(this.modifyCount++,"")}detect(){this.modifyCount=0,v(this.func),e(),2<=this.modifyCount&&this.onDevToolOpen()}},[C.DebuggerStatement]:class a extends D{constructor(){super({type:C.DebuggerStatement,isEnabled:r.isIOSChrome||r.isIOSEdge})}detect(){const a=t();debugger;100<t()-a&&this.onDevToolOpen()}},[C.Performance]:class a extends D{constructor(){super({type:C.Performance,isEnabled:r.isChrome||!r.isMobile})}init(){this.maxPrintTime=0,this.largeArray=d()}detect(){const a=u(()=>w(this.largeArray)),b=u(()=>v(this.largeArray));return this.maxPrintTime=Math.max(this.maxPrintTime,b),e(),0!==a&&0!==this.maxPrintTime&&void(a>10*this.maxPrintTime&&this.onDevToolOpen())}},[C.ExternalLibrary]:E};let H=0,I=0,J=0;const K=Object.assign(function(a={}){function d(a=""){return{success:!a,reason:a}}if(K.isRunning)return d("already running");c(),x=r.isIE?console.clear:console.clear,b(a);const e=new URLSearchParams(window.location.search).get(p.urlTokenParam);if(e&&o(e)===o(window.location.href))return d("token passed");if(p.detectSearchEngineBots&&r.isSearchBot)return d("search bot");K.isRunning=!0,m();const f=window.top,g=window.parent;if(p.disableDevToolsInParentFrames&&f&&g&&f!==window){for(let a=window;a!==f;)i(a),a=a.parent;i(f)}i(window);const h="all"===p.enabledDetectors?Object.keys(G):p.enabledDetectors;return h.forEach(a=>{new G[a]}),I=setTimeout(function(){r.isPC||E.isExternalLibraryInUse()||n()},p.stopCheckAfter),d()},{isRunning:!1,isDetectionSuspended:!1,calculateMD5:o,version:"0.0.1",DetectorType:C,isDevToolOpened:g}),L=function(){if("undefined"==typeof window||!window.document)return null;const a=document.querySelector("[secura-auto]");if(!a)return null;const b=["disable-menu","disable-select","disable-copy","disable-cut","disable-paste","clear-log"],c=["interval"],d={};["md5","url","tk-name","detectors",...b,...c].forEach(e=>{const f=a.getAttribute(e);if(null!==f){const a=e.split("-").map((a,b)=>0===b?a:a.charAt(0).toUpperCase()+a.slice(1)).join("");d[a]=c.includes(e)?parseInt(f):b.includes(e)?"false"!==f:"detectors"===e&&"all"!==f?f.split(" "):f}});const e={md5:"urlToken",url:"redirectUrl","tk-name":"urlTokenParam",disableMenu:"disableRightClick",disableSelect:"disableTextSelect",clearLog:"clearConsoleOnInit",interval:"checkInterval"};for(const a in e)void 0!==d[a]&&(d[e[a]]=d[a],delete d[a]);return d}();return L&&K(L),K});