UI Overhaul and changing provider to SambaNova #2
opened by KingNish
- app.py +114 -2
- index.html +249 -64
- requirements.txt +1 -3
- script1.js +527 -1
- script2.js +582 -1
- secure.js +0 -1
app.py
CHANGED
@@ -1,4 +1,116 @@
+from fastapi import FastAPI, HTTPException, Request
+from fastapi.responses import StreamingResponse
+from fastapi.middleware.cors import CORSMiddleware
+import aiohttp
+import json
+import time
+import random
+import ast
+import urllib.parse
+from apscheduler.schedulers.background import BackgroundScheduler
 import os
-from
+from pydantic import BaseModel

-
+SAMBA_NOVA_API_KEY = os.environ.get("SAMBA_NOVA_API_KEY", None)
+
+app = FastAPI()
+
+# Time-Limited Infinite Cache
+cache = {}
+CACHE_DURATION = 120
+
+# Function to clean up expired cache entries
+def cleanup_cache():
+    current_time = time.time()
+    for key, (value, timestamp) in list(cache.items()):
+        if current_time - timestamp > CACHE_DURATION:
+            del cache[key]
+
+# Initialize and start the scheduler
+scheduler = BackgroundScheduler()
+scheduler.add_job(cleanup_cache, 'interval', seconds=60)  # Run cleanup every 60 seconds
+scheduler.start()
+
+class StreamTextRequest(BaseModel):
+    query: str
+    history: str = "[]"
+    model: str = "llama3-8b"
+    api_key: str = None
+
+@app.post("/stream_text")
+async def stream_text(request: StreamTextRequest):
+    current_time = time.time()
+    cache_key = (request.query, request.history, request.model)
+
+    # Check if the request is in the cache and not expired
+    if cache_key in cache:
+        cached_response, timestamp = cache[cache_key]
+        return StreamingResponse(iter([f"{cached_response}"]), media_type='text/event-stream')
+
+    # Model selection logic
if "405" in request.model:
|
52 |
+
fmodel = "Meta-Llama-3.1-405B-Instruct"
|
53 |
+
if "70" in request.model:
|
54 |
+
fmodel = "Meta-Llama-3.1-70B-Instruct"
|
55 |
+
else:
|
56 |
+
fmodel = "Meta-Llama-3.1-8B-Instruct"
+
+    system_message = """You are Voicee, a friendly and intelligent voice assistant created by KingNish. Your primary goal is to provide accurate, concise, and engaging responses while maintaining a positive and upbeat tone. Always aim to provide clear and relevant information that directly addresses the user's query, but feel free to sprinkle in a dash of humor—after all, laughter is the best app! Keep your responses brief and to the point, avoiding unnecessary details or tangents, unless they’re hilariously relevant. Use a friendly and approachable tone to create a pleasant interaction, and don’t shy away from a cheeky pun or two! Tailor your responses based on the user's input and previous interactions, ensuring a personalized experience that feels like chatting with a witty friend. Invite users to ask follow-up questions or clarify their needs, fostering a conversational flow that’s as smooth as butter on a hot pancake. Aim to put a smile on the user's face with light-hearted and fun responses, and be proactive in offering additional help or suggestions related to the user's query. Remember, your goal is to be the go-to assistant for users, making their experience enjoyable and informative—like a delightful dessert after a hearty meal!"""
+
+    messages = [{'role': 'system', 'content': system_message}]
+
+    messages.extend(ast.literal_eval(request.history))
+
+    messages.append({'role': 'user', 'content': request.query})
+
+    data = {'messages': messages, 'stream': True, 'model': fmodel}
+
+    api_key = request.api_key or SAMBA_NOVA_API_KEY
+
+    async def stream_response():
+        async with aiohttp.ClientSession() as session:
+            async with session.post('https://api.sambanova.ai/v1/chat/completions', headers={'Authorization': f'Bearer {api_key}', 'Content-Type': 'application/json'}, json=data) as response:
+                if response.status != 200:
+                    raise HTTPException(status_code=response.status, detail="Error fetching AI response")
+
+                response_content = ""
+                async for line in response.content:
+                    line = line.decode('utf-8').strip()
+                    if line.startswith('data: {'):
+                        json_data = line[6:]
+                        try:
+                            parsed_data = json.loads(json_data)
+                            content = parsed_data.get("choices", [{}])[0].get("delta", {}).get("content", '')
+                            if content:
+                                content = content.replace("\n", " ")
+                                response_content += f"data: {content}\n\n"
+                                yield f"data: {content}\n\n"
+                        except json.JSONDecodeError as e:
+                            print(f"Error decoding JSON: {e}")
+                            yield f"data: Error decoding JSON\n\n"
+
+                # Cache the full response
+                cache[cache_key] = (response_content, current_time)
+
+    return StreamingResponse(stream_response(), media_type='text/event-stream')
+
+
+
+# Serve index.html from the same directory as your main.py file
+from starlette.responses import FileResponse
+
+@app.get("/script1.js")
+async def script1_js():
+    return FileResponse("script1.js")
+
+@app.get("/script2.js")
+async def script2_js():
+    return FileResponse("script2.js")
+
+@app.get("/")
+async def read_index():
+    return FileResponse('index.html')
+
+if __name__ == "__main__":
+    import uvicorn
+    uvicorn.run(app, host="0.0.0.0", port=7068, reload=True)
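A quick way to exercise the new /stream_text route during review is a small SSE client. This sketch is not part of the diff: it assumes the app is running locally on port 7068 (the port app.py binds), uses the third-party requests package, and the stream_text helper name is ours.

import json
import requests

# Hypothetical client for the /stream_text endpoint added in this PR.
# Assumes the FastAPI app above is running on localhost:7068.
def stream_text(query, history=None, model="llama3-8b"):
    payload = {
        "query": query,
        # The server expects history as a string and parses it with ast.literal_eval.
        "history": json.dumps(history or []),
        "model": model,
    }
    with requests.post(
        "http://localhost:7068/stream_text",
        json=payload,
        headers={"Accept": "text/event-stream"},
        stream=True,
    ) as response:
        response.raise_for_status()
        for line in response.iter_lines(decode_unicode=True):
            # The endpoint emits SSE-style "data: <text>" lines.
            if line and line.startswith("data: "):
                yield line[len("data: "):]

if __name__ == "__main__":
    for chunk in stream_text("What is SambaNova?"):
        print(chunk, end=" ", flush=True)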
index.html
CHANGED
@@ -1,66 +1,251 @@
 <!DOCTYPE html>
-<html lang=en>
-<head>
-<meta charset=UTF-8>
-<meta name=viewport content="width=device-width, initial-scale=1.0">
-<title>Voice Assistant</title>
-<link href=https://
-<style
+<html lang="en">
+<head>
+    <meta charset="UTF-8">
+    <meta name="viewport" content="width=device-width, initial-scale=1.0">
+    <title>Voice Assistant</title>
+    <link href="https://fonts.googleapis.com/css2?family=Roboto:wght@300;400;700&display=swap" rel="stylesheet">
+    <style>
+        :root {
+            --primary-color: #4a90e2;
+            --secondary-color: #f39c12;
+            --background-color: #f0f4f8;
+            --card-bg-color: #ffffff;
+            --text-color: #333333;
+            --border-color: #e0e0e0;
+        }
+
+        body {
+            font-family: 'Roboto', sans-serif;
+            background-color: var(--background-color);
+            color: var(--text-color);
+            margin: 0;
+            padding: 0;
+            display: flex;
+            justify-content: center;
+            align-items: center;
+            min-height: 100vh;
+        }
+
+        .container {
+            width: 90%;
+            max-width: 800px;
+        }
+
+        .voice-assistant-card {
+            background-color: var(--card-bg-color);
+            border-radius: 20px;
+            box-shadow: 0 10px 30px rgba(0, 0, 0, 0.1);
+            padding: 40px;
+            text-align: center;
+        }
+
+        .title {
+            font-size: 2.5rem;
+            font-weight: 700;
+            margin-bottom: 20px;
+            color: var(--primary-color);
+        }
+
+        #responseTime {
+            font-size: 0.9rem;
+            color: #777;
+            margin-bottom: 20px;
+        }
+
+        .indicator-wrapper {
+            display: flex;
+            justify-content: space-around;
+            margin-bottom: 30px;
+        }
+
+        .indicator {
+            display: flex;
+            align-items: center;
+            padding: 10px 20px;
+            border-radius: 50px;
+            font-size: 1rem;
+            color: #fff;
+            transition: all 0.3s ease;
+        }
+
+        .indicator svg {
+            margin-right: 8px;
+        }
+
+        #userIndicator {
+            background-color: var(--primary-color);
+        }
+
+        #aiIndicator {
+            background-color: var(--secondary-color);
+        }
+
+        #startStopButton {
+            background-color: #38cb96;
+            color: #fff;
+            border: none;
+            padding: 15px 30px;
+            font-size: 1.2rem;
+            border-radius: 50px;
+            cursor: pointer;
+            transition: all 0.3s ease;
+            display: flex;
+            align-items: center;
+            justify-content: center;
+            margin: 0 auto 30px;
+        }
+
+        #startStopButton:hover {
+            background-color: #1e9b6e;
+            transform: translateY(-2px);
+            box-shadow: 0 5px 15px rgba(74, 144, 226, 0.3);
+        }
+
+        #startStopButton svg {
+            margin-right: 10px;
+        }
+
+        .settings {
+            display: grid;
+            grid-template-columns: 1fr 1fr 1.5fr;
+            gap: 20px;
+            margin-bottom: 30px;
+        }
+
+        .setting {
+            text-align: left;
+            position: relative; /* Added for tooltip positioning */
+        }
+
+        .setting label {
+            display: block;
+            margin-bottom: 5px;
+            font-weight: 700;
+            color: var(--text-color);
+        }
+
+        select,
+        input[type="password"] {
+            width: 100%;
+            padding: 10px;
+            border: 1px solid var(--border-color);
+            border-radius: 5px;
+            font-size: 1rem;
+            background-color: #fff;
+            color: var(--text-color);
+        }
+
+        .tooltip {
+            display: none;
+            position: absolute;
+            background-color: #333;
+            color: #fff;
+            padding: 5px;
+            border-radius: 5px;
+            font-size: 0.8rem;
+        }
+
+        .setting:hover .tooltip {
+            display: block; /* Show tooltip on hover */
+        }
+
+        #transcript {
+            background-color: #f9f9f9;
+            border-radius: 10px;
+            padding: 20px;
+            margin-top: 30px;
+            text-align: left;
+            font-family: 'Courier New', monospace;
+            white-space: pre-wrap;
+            max-height: 200px;
+            overflow-y: auto;
+        }
+
+        @media (max-width: 600px) {
+            .settings {
+                grid-template-columns: 1fr;
+            }
+        }
+    </style>
+</head>
+<body>
+    <div class="container">
+        <div class="voice-assistant-card">
+            <h1 class="title">Voice Assistant</h1>
+            <div id="responseTime">Latency: 0ms</div>
+            <div class="indicator-wrapper">
+                <div id="userIndicator" class="indicator">
+                    <svg width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round">
+                        <path d="M20 21v-2a4 4 0 0 0-4-4H8a4 4 0 0 0-4 4v2"></path>
+                        <circle cx="12" cy="7" r="4"></circle>
+                    </svg>
+                    <span>User: Idle</span>
+                </div>
+                <div id="aiIndicator" class="indicator">
+                    <svg width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round">
+                        <polygon points="12 2 2 7 12 12 22 7 12 2"></polygon>
+                        <polyline points="2 17 12 22 22 17"></polyline>
+                        <polyline points="2 12 12 17 22 12"></polyline>
+                    </svg>
+                    <span>AI: Idle</span>
+                </div>
+            </div>
+            <button id="startStopButton">
+                <svg width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round">
+                    <path d="M12 1a3 3 0 0 0-3 3v8a3 3 0 0 0 6 0V4a3 3 0 0 0-3-3z"></path>
+                    <path d="M19 10v2a7 7 0 0 1-14 0v-2"></path>
+                    <line x1="12" y1="19" x2="12" y2="23"></line>
+                    <line x1="8" y1="23" x2="16" y2="23"></line>
+                </svg> Start Listening </button>
+            <div class="settings">
+                <div class="setting">
+                    <label for="voiceSelect">Voice:</label>
+                    <select id="voiceSelect">
+                        <option value="Amy">Female</option>
+                        <option value="Brian">Male</option>
+                    </select>
+                    <span class="tooltip">Select the voice type for the assistant.</span>
+                </div>
+                <div class="setting">
+                    <label for="modelSelect">Model:</label>
+                    <select id="modelSelect">
+                        <option value="8b">Fastest</option>
+                        <option value="70b">Powerful</option>
+                    </select>
+                    <span class="tooltip">Choose the model based on speed or power.</span>
+                </div>
+                <div class="setting">
<label for="apiKey">SambaNava API Key (optional):</label>
+                    <input type="password" id="apiKey" placeholder="Enter your API Key">
+                    <span class="tooltip">Use a SambaNova API key for enhanced speed. You can obtain a free key from <a href="https://cloud.sambanova.ai/apis" style="color: #00f9f0;">https://cloud.sambanova.ai/apis</a>.</span>
+                </div>
+            </div>
+            <div id="transcript"></div>
+        </div>
+    </div>
+    <script>
+        function loadScript() {
+            var isChrome = /Chrome/.test(navigator.userAgent) && /Google Inc/.test(navigator.vendor) && !/Edg/.test(navigator.userAgent);
+            var isDesktop = window.innerWidth > 768;
+            var existingScript = document.querySelector('script[src="script1.js"], script[src="script2.js"]');
+            if (existingScript) {
+                existingScript.remove();
+            }
+            var script = document.createElement('script');
+            if (isChrome && isDesktop) {
+                script.src = 'script1.js';
+            } else {
+                script.src = 'script2.js';
+            }
+            script.onerror = function() {
+                console.error('Error loading script:', script.src);
+            };
+            document.head.appendChild(script);
+        }
+        document.addEventListener('DOMContentLoaded', loadScript);
+    </script>
+</body>
 </html>
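The inline script at the end of index.html routes desktop Chrome to script1.js and every other browser to script2.js. A sketch of that routing decision, rewritten in Python for test purposes (the regex checks mirror the JS; pick_script and the sample strings are illustrative):

import re

def pick_script(user_agent: str, vendor: str, viewport_width: int) -> str:
    # Mirrors loadScript() in index.html: desktop Chrome (not Edge)
    # gets script1.js; every other browser gets script2.js.
    is_chrome = (
        re.search(r"Chrome", user_agent) is not None
        and re.search(r"Google Inc", vendor) is not None
        and re.search(r"Edg", user_agent) is None
    )
    is_desktop = viewport_width > 768
    return "script1.js" if is_chrome and is_desktop else "script2.js"

assert pick_script("Mozilla/5.0 ... Chrome/120.0", "Google Inc.", 1280) == "script1.js"
assert pick_script("Mozilla/5.0 ... Safari/605.1", "Apple Computer, Inc.", 1280) == "script2.js"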
requirements.txt
CHANGED
@@ -1,8 +1,6 @@
-groq
 fastapi
 starlette
 python-multipart
 uvicorn
 aiohttp
-apscheduler
-scikit-learn
+apscheduler
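The only dependency this file now adds is apscheduler, which drives the cache sweep in app.py; groq and scikit-learn were unused and are dropped. A standalone check of the eviction behavior, with the interval and TTL shortened so it finishes quickly (illustrative only):

import time
from apscheduler.schedulers.background import BackgroundScheduler

cache = {"greeting": ("hello", time.time())}  # key -> (value, timestamp)
CACHE_DURATION = 1  # shortened from app.py's 120s so the demo finishes quickly

def cleanup_cache():
    # Same sweep as app.py: drop entries older than CACHE_DURATION.
    # list() snapshots the items so the dict can be mutated mid-loop.
    now = time.time()
    for key, (_value, timestamp) in list(cache.items()):
        if now - timestamp > CACHE_DURATION:
            del cache[key]

scheduler = BackgroundScheduler()
scheduler.add_job(cleanup_cache, 'interval', seconds=1)  # app.py sweeps every 60s
scheduler.start()

time.sleep(3)
print(cache)  # {} -- the expired entry has been evicted
scheduler.shutdown()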
script1.js
CHANGED
@@ -1 +1,527 @@
-
+const startStopButton = document.getElementById('startStopButton');
+const voiceSelectionDropdown = document.getElementById('voiceSelect');
+const modelSelectionDropdown = document.getElementById('modelSelect');
+const noiseSuppressionCheckbox = document.getElementById('noiseSuppression');
+const responseTimeDisplay = document.getElementById('responseTime');
+const userActivityIndicator = document.getElementById('userIndicator');
+const aiActivityIndicator = document.getElementById('aiIndicator');
+const transcriptDiv = document.getElementById('transcript');
+
+let speechRecognizer;
+let activeQuery = null;
+let queryStartTime = 0;
+let completeTranscript = '';
+let isRequestInProgress = false;
+let isUserSpeaking = false;
+let isSpeechRecognitionActive = false;
+let requestAbortController = null;
+let partialTranscript = '';
+let lastUserSpeechTimestamp = null;
+let prefetchTextQuery = "";
+let firstResponseTextTimestamp = null;
+
+// Configuration
+const USER_SPEECH_INTERRUPT_DELAY = 500;
+const TEXT_TO_SPEECH_API_ENDPOINT = "https://api.streamelements.com/kappa/v2/speech";
+const CHUNK_SIZE = 300;
+
+// Audio Management
+let currentAudio = null;
+let audioPlaybackQueue = [];
+let prefetchQueue = [];
+
+// Enhanced Prefetching and Caching
+const prefetchCache = new Map();
+const pendingPrefetchRequests = new Map();
+const MAX_PREFETCH_REQUESTS = 10;
+const prefetchCacheExpiration = 60000; // 1 minute
+
+// Global Conversation History
+let conversationHistory = [];
+
+// Audio Caching
+const audioCache = new Map();
+const audioCacheExpiration = 3600000; // 1 hour
+
+// Normalize query text
+const normalizeQueryText = query => query.trim().toLowerCase().replace(/[^\w\s]/g, '');
+
+// Generate a cache key
+const generateCacheKey = (normalizedQuery, voice, history, modelName) =>
+  `${normalizedQuery}-${voice}-${JSON.stringify(history)}-${modelName}`;
+
+// Prefetch and cache the first TTS audio chunk
+const prefetchFirstAudioChunk = (query, voice) => {
+  const normalizedQuery = normalizeQueryText(query);
+  const cacheKey = generateCacheKey(normalizedQuery, voice, conversationHistory, modelSelectionDropdown.value);
+
+  if (pendingPrefetchRequests.has(cacheKey) || prefetchCache.has(cacheKey)) return;
+
+  prefetchQueue.push({ query: query.trim(), voice, cacheKey });
+  processPrefetchQueue();
+};
+
+// Process the prefetch queue
+const processPrefetchQueue = async () => {
+  while (prefetchQueue.length > 0 && pendingPrefetchRequests.size < MAX_PREFETCH_REQUESTS) {
+    const { query, voice, cacheKey } = prefetchQueue.shift();
+    const abortController = new AbortController();
+    pendingPrefetchRequests.set(cacheKey, abortController);
+
+    const url = '/stream_text';
+    const requestBody = {
+      query: query,
+      history: JSON.stringify(conversationHistory),
+      model: modelSelectionDropdown.value
+    };
+
+    try {
+      const response = await fetch(url, {
+        method: 'POST',
+        headers: {
+          'Accept': 'text/event-stream',
+          'Content-Type': 'application/json'
+        },
+        body: JSON.stringify(requestBody),
+        signal: abortController.signal
+      });
+
+      if (!response.ok) throw new Error('Network response was not ok');
+
+      const firstAudioUrl = await handleStreamingResponseForPrefetch(response.body, voice, abortController.signal);
+
+      if (firstAudioUrl) prefetchCache.set(cacheKey, { url: firstAudioUrl, timestamp: Date.now() });
+
+    } catch (error) {
+      if (error.name !== 'AbortError') console.error("Error prefetching audio:", error);
+    } finally {
+      pendingPrefetchRequests.delete(cacheKey);
+      processPrefetchQueue();
+    }
+  }
+};
+
+// Handle the streaming response for prefetching
+const handleStreamingResponseForPrefetch = async (responseStream, voice, abortSignal) => {
+  const reader = responseStream.getReader();
+  const decoder = new TextDecoder("utf-8");
+  let buffer = "";
+
+  try {
+    while (true) {
+      const { done, value } = await reader.read();
+      if (done) break;
+      if (abortSignal.aborted) throw new DOMException('Request aborted', 'AbortError');
+
+      const chunk = decoder.decode(value, { stream: true });
+      buffer += chunk;
+      const lines = buffer.split('\n');
+
+      for (let i = 0; i < lines.length - 1; i++) {
+        const line = lines[i];
+        if (line.startsWith('data: ')) {
+          const textContent = line.substring(6).trim();
+          if (textContent) {
+            const audioUrl = await generateTextToSpeechAudio(textContent, voice);
+            return audioUrl;
+          }
+        }
+      }
+
+      buffer = lines[lines.length - 1];
+    }
+  } catch (error) {
+    console.error("Error in handleStreamingResponseForPrefetch:", error);
+  } finally {
+    reader.releaseLock();
+  }
+
+  return null;
+};
+
+// Play audio from the queue
+const playNextAudio = async () => {
+  if (audioPlaybackQueue.length > 0) {
+    const audioData = audioPlaybackQueue.shift();
+    const audio = new Audio(audioData.url);
+    updateActivityIndicators();
+
+    const audioPromise = new Promise(resolve => {
+      audio.onended = resolve;
+      audio.onerror = resolve;
+    });
+    if (currentAudio) {
+      currentAudio.pause();
+      currentAudio.currentTime = 0;
+    }
+
+    currentAudio = audio;
+    await audio.play();
+    await audioPromise;
+    playNextAudio();
+  } else {
+    updateActivityIndicators();
+  }
+};
+
+// Generate Text-to-Speech audio with caching
+const generateTextToSpeechAudio = async (text, voice) => {
+  const normalizedText = normalizeQueryText(text);
+  const cacheKey = `${normalizedText}-${voice}`;
+
+  if (audioCache.has(cacheKey)) {
+    const cachedData = audioCache.get(cacheKey);
+    if (Date.now() - cachedData.timestamp < audioCacheExpiration) {
+      return cachedData.url;
+    } else {
+      audioCache.delete(cacheKey);
+    }
+  }
+
+  try {
+    const response = await fetch(`${TEXT_TO_SPEECH_API_ENDPOINT}?voice=${voice}&text=${encodeURIComponent(text)}`, { method: 'GET' });
+    if (!response.ok) throw new Error('Network response was not ok');
+    const audioBlob = await response.blob();
+    const audioUrl = URL.createObjectURL(audioBlob);
+
+    audioCache.set(cacheKey, { url: audioUrl, timestamp: Date.now() });
+    return audioUrl;
+  } catch (error) {
+    console.error("Error generating TTS audio:", error);
+    return null;
+  }
+};
+
+// Send a query to the AI
+const sendQueryToAI = async (query) => {
+  console.log("Sending query to AI:", query);
+  isRequestInProgress = true;
+  updateActivityIndicators();
+  queryStartTime = Date.now();
+  firstResponseTextTimestamp = null;
+
+  const normalizedQuery = normalizeQueryText(query);
+  const cacheKey = generateCacheKey(normalizedQuery, voiceSelectionDropdown.value, conversationHistory, modelSelectionDropdown.value);
+
+  if (prefetchCache.has(cacheKey)) {
+    const cachedData = prefetchCache.get(cacheKey);
+    if (Date.now() - cachedData.timestamp < prefetchCacheExpiration) {
+      const prefetchedAudioUrl = cachedData.url;
+      audioPlaybackQueue.push({ url: prefetchedAudioUrl, isPrefetched: true });
+      playNextAudio();
+    } else {
+      prefetchCache.delete(cacheKey);
+    }
+  }
+
+  requestAbortController = new AbortController();
+
+  const url = '/stream_text';
+  const requestBody = {
+    query: query,
+    history: JSON.stringify(conversationHistory),
+    model: modelSelectionDropdown.value
+  };
+
+  try {
+    const response = await fetch(url, {
+      method: 'POST',
+      headers: {
+        'Accept': 'text/event-stream',
+        'Content-Type': 'application/json'
+      },
+      body: JSON.stringify(requestBody),
+      signal: requestAbortController.signal
+    });
+
+    if (!response.ok) {
+      if (response.status === 429) {
+        console.log("Rate limit hit, retrying in 1 second...");
+        await new Promise(resolve => setTimeout(resolve, 1000));
+        await sendQueryToAI(query);
+        return;
+      }
+      throw new Error(`Network response was not ok: ${response.status}`);
+    }
+
+    console.log("Streaming audio response received");
+    await handleStreamingResponse(response.body, voiceSelectionDropdown.value, requestAbortController.signal);
+  } catch (error) {
+    if (error.name !== 'AbortError') {
+      console.error("Error sending query to AI:", error);
+    }
+  } finally {
+    isRequestInProgress = false;
+    updateActivityIndicators();
+  }
+};
+
+// Handle the streaming audio response
+const handleStreamingResponse = async (responseStream, voice, abortSignal) => {
+  const reader = responseStream.getReader();
+  const decoder = new TextDecoder("utf-8");
+  let buffer = "";
+  let initialChunksSent = 0;
+  let fullResponseText = "";
+  let fullResponseText2 = "";
+  let textChunk = "";
+  let sentText = "";
+
+  try {
+    while (true) {
+      const { done, value } = await reader.read();
+      if (done) break;
+      if (abortSignal.aborted) throw new DOMException('Request aborted', 'AbortError');
+
+      if (isUserSpeaking) {
+        interruptAudioPlayback('user is speaking');
+        break;
+      }
+
+      const chunk = decoder.decode(value, { stream: true });
+      buffer += chunk;
+      const lines = buffer.split('\n');
+
+      for (let i = 0; i < lines.length - 1; i++) {
+        const line = lines[i];
+        if (line.startsWith('data: ')) {
+          const textContent = line.substring(6).trim();
+          if (textContent) {
+            if (!firstResponseTextTimestamp) firstResponseTextTimestamp = Date.now();
+
+            fullResponseText += textContent + " ";
+            fullResponseText2 += textContent + " ";
+            textChunk += textContent + " ";
+            transcriptDiv.textContent = fullResponseText2;
+
+            if (initialChunksSent < 2) {
+              const audioUrl = await generateTextToSpeechAudio(textContent, voice);
+              if (audioUrl) {
+                audioPlaybackQueue.push({ url: audioUrl, isPrefetched: false });
+                if (!currentAudio) playNextAudio();
+              }
+              sentText += textContent + " ";
+              initialChunksSent++;
+            } else {
+              let unsentTextChunk = textChunk.replace(sentText, '').trim();
+
+              if (unsentTextChunk.length >= CHUNK_SIZE) {
+                const audioUrl = await generateTextToSpeechAudio(unsentTextChunk, voice);
+                if (audioUrl) {
+                  audioPlaybackQueue.push({ url: audioUrl, isPrefetched: false });
+                  if (!currentAudio) playNextAudio();
+                }
+                textChunk = "";
+              }
+            }
+
+            if (fullResponseText !== '') {
+              fullResponseText = '';
+            }
+          }
+        }
+      }
+
+      buffer = lines[lines.length - 1];
+    }
+  } catch (error) {
+    console.error("Error in handleStreamingResponse:", error);
+  } finally {
+    reader.releaseLock();
+
+    let unsentTextChunk = textChunk.replace(sentText, '').trim();
+    if (unsentTextChunk !== "") {
+      const audioUrl = await generateTextToSpeechAudio(unsentTextChunk, voice);
+      if (audioUrl) {
+        audioPlaybackQueue.push({ url: audioUrl, isPrefetched: false });
+        if (!currentAudio) playNextAudio();
+      }
+    }
+
+    if (fullResponseText !== '') {
+      fullResponseText = '';
+    }
+    if (fullResponseText2 !== '') {
+      addToConversationHistory('assistant', fullResponseText2);
+      fullResponseText2 = '';
+    }
+  }
+};
+
+// Update activity indicators
+const updateActivityIndicators = (state = null) => {
+  userActivityIndicator.textContent = isUserSpeaking ? "User: Speaking" : "User: Idle";
+  userActivityIndicator.className = isUserSpeaking
+    ? "indicator rounded-full px-4 py-2 text-white flex items-center transition-colors duration-300 bg-gradient-to-r from-blue-400 to-blue-600 hover:bg-gradient-to-r from-blue-500 to-blue-700"
+    : "indicator rounded-full px-4 py-2 text-white flex items-center transition-colors duration-300 bg-gradient-to-r from-gray-300 to-gray-400 dark:from-gray-700 dark:to-gray-800 hover:bg-gradient-to-r from-gray-400 to-gray-500"; // Tailwind classes
+
+  if (isRequestInProgress && !currentAudio) {
+    aiActivityIndicator.textContent = "AI: Processing...";
+    aiActivityIndicator.className = "indicator rounded-full px-4 py-2 text-white flex items-center transition-colors duration-300 bg-gradient-to-r from-purple-400 to-purple-600 hover:bg-gradient-to-r from-purple-500 to-purple-700"; // Tailwind class for thinking
+  } else if (currentAudio && !isUserSpeaking) {
+    aiActivityIndicator.textContent = state || "AI: Speaking";
+    aiActivityIndicator.className = "indicator rounded-full px-4 py-2 text-white flex items-center transition-colors duration-300 bg-gradient-to-r from-green-400 to-green-600 hover:bg-gradient-to-r from-green-500 to-green-700"; // Tailwind class for speaking
+  } else if (isUserSpeaking) {
+    aiActivityIndicator.textContent = "AI: Listening";
+    aiActivityIndicator.className = "indicator rounded-full px-4 py-2 text-white flex items-center transition-colors duration-300 bg-gradient-to-r from-yellow-400 to-yellow-600 hover:bg-gradient-to-r from-yellow-500 to-yellow-700"; // Tailwind class for listening
+  } else {
+    aiActivityIndicator.textContent = "AI: Idle";
+    aiActivityIndicator.className = "indicator rounded-full px-4 py-2 text-white flex items-center transition-colors duration-300 bg-gradient-to-r from-gray-300 to-gray-400 dark:from-gray-700 dark:to-gray-800 hover:bg-gradient-to-r from-gray-400 to-gray-500"; // Tailwind classes
+  }
+};
+
+
+// Initialize speech recognition
+if ('webkitSpeechRecognition' in window) {
+  speechRecognizer = new webkitSpeechRecognition();
+  Object.assign(speechRecognizer, {
+    continuous: true,
+    interimResults: true,
+    language: 'en-US',
+    maxAlternatives: 3
+  });
+
+  speechRecognizer.onstart = () => {
+    console.log("Speech recognition started");
+    completeTranscript = '';
+    isUserSpeaking = true;
+    lastUserSpeechTimestamp = Date.now();
+    updateActivityIndicators();
+    startStopButton.innerHTML = '<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"><path d="M9 9h6v6h-6z"></path><path d="M12 1a3 3 0 0 0-3 3v8a3 3 0 0 0 6 0V4a3 3 0 0 0-3-3z"></path><path d="M19 10v2a7 7 0 0 1-14 0v-2"></path><line x1="12" y1="19" x2="12" y2="23"></line><line x1="8" y1="23" x2="16" y2="23"></line></svg> Stop Listening';
+  };
+
+  speechRecognizer.onresult = (event) => {
+    let interimTranscript = '';
+    for (let i = event.resultIndex; i < event.results.length; i++) {
+      const transcript = event.results[i][0].transcript;
+      if (event.results[i].isFinal) {
+        completeTranscript += transcript;
+        interruptAudioPlayback('final');
+        processSpeechTranscript(completeTranscript);
+        completeTranscript = '';
+        isUserSpeaking = false;
+        updateActivityIndicators();
+        queryStartTime = Date.now();
+      } else {
+        interimTranscript += transcript;
+        isUserSpeaking = true;
+        lastUserSpeechTimestamp = Date.now();
+        updateActivityIndicators();
+
+        if (interimTranscript.length > prefetchTextQuery.length + 5) {
+          cancelPrefetchRequests(prefetchTextQuery);
+        }
+        prefetchTextQuery = interimTranscript;
+        prefetchFirstAudioChunk(interimTranscript, voiceSelectionDropdown.value);
+
+        if (isRequestInProgress && shouldInterruptAudioPlayback(interimTranscript)) {
+          interruptAudioPlayback('interim');
+        }
+      }
+    }
+  };
+
+  speechRecognizer.onerror = (event) => {
+    console.error('Speech recognition error:', event.error);
+    if (isSpeechRecognitionActive) speechRecognizer.start();
+  };
+
+  speechRecognizer.onend = () => {
+    isUserSpeaking = false;
+    updateActivityIndicators();
+
+    if (!isRequestInProgress && completeTranscript !== '') {
+      processSpeechTranscript(completeTranscript);
+      completeTranscript = '';
+    }
+
+    if (isSpeechRecognitionActive) speechRecognizer.start();
+  };
+
+  startStopButton.addEventListener('click', () => {
+    if (isSpeechRecognitionActive) {
+      speechRecognizer.stop();
+      isSpeechRecognitionActive = false;
+      startStopButton.innerHTML = '<svg id="microphoneIcon" xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"><path d="M12 1a3 3 0 0 0-3 3v8a3 3 0 0 0 6 0V4a3 3 0 0 0-3-3z"></path><path d="M19 10v2a7 7 0 0 1-14 0v-2"></path><line x1="12" y1="19" x2="12" y2="23"></line><line x1="8" y1="23" x2="16" y2="23"></line></svg> Start Listening';
+    } else {
+      speechRecognizer.start();
+      isSpeechRecognitionActive = true;
+      startStopButton.innerHTML = '<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"><path d="M9 9h6v6h-6z"></path><path d="M12 1a3 3 0 0 0-3 3v8a3 3 0 0 0 6 0V4a3 3 0 0 0-3-3z"></path><path d="M19 10v2a7 7 0 0 1-14 0v-2"></path><line x1="12" y1="19" x2="12" y2="23"></line><line x1="8" y1="23" x2="16" y2="23"></line></svg> Stop Listening';
+    }
+  });
+} else {
+  alert('Your browser does not support the Web Speech API.');
+}
+
+// Add to conversation history
+const addToConversationHistory = (role, content) => {
+  if (conversationHistory.length > 0 &&
+    conversationHistory[conversationHistory.length - 1].role === 'assistant' &&
+    conversationHistory[conversationHistory.length - 1].content === "") {
+    conversationHistory.pop();
+  }
+
+  conversationHistory.push({ role, content });
+
+  if (conversationHistory.length > 6) conversationHistory.splice(0, 2);
+};
+
+// Process the final speech transcript
+const processSpeechTranscript = (transcript) => {
+  const trimmedTranscript = transcript.trimStart();
+  if (trimmedTranscript !== '' && !isRequestInProgress) {
+    activeQuery = trimmedTranscript;
+    sendQueryToAI(activeQuery);
+    addToConversationHistory('user', activeQuery);
+  }
+};
+
+// Check if audio playback should be interrupted
+const shouldInterruptAudioPlayback = (interimTranscript) =>
+  Date.now() - lastUserSpeechTimestamp > USER_SPEECH_INTERRUPT_DELAY || interimTranscript.length > 5;
+
+// Interrupt audio playback
+const interruptAudioPlayback = (reason = 'unknown') => {
+  console.log(`Interrupting audio (reason: ${reason})...`);
+  if (currentAudio) {
+    currentAudio.pause();
+    currentAudio.currentTime = 0;
+    currentAudio = null;
+  }
+
+  audioPlaybackQueue.length = 0;
+  isRequestInProgress = false;
+
+  if (requestAbortController) {
+    requestAbortController.abort();
+    requestAbortController = null;
+  }
+
+  prefetchCache.clear();
+  prefetchQueue.length = 0;
+  updateActivityIndicators();
+};
+
+// Cancel pending prefetch requests
+const cancelPrefetchRequests = (query) => {
+  const normalizedQuery = normalizeQueryText(query);
+
+  for (const [cacheKey, abortController] of pendingPrefetchRequests) {
+    if (cacheKey.startsWith(normalizedQuery)) {
+      abortController.abort();
+      pendingPrefetchRequests.delete(cacheKey);
+    }
+  }
+};
+
+// Update latency display
+const updateLatency = () => {
+  if (firstResponseTextTimestamp) {
+    const latency = firstResponseTextTimestamp - queryStartTime;
+    responseTimeDisplay.textContent = `Latency: ${latency}ms`;
+  } else {
+    responseTimeDisplay.textContent = "Latency: 0ms";
+  }
+};
+
+setInterval(updateLatency, 200);
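script1.js synthesizes speech by GETting the StreamElements endpoint with voice and text query parameters and playing the returned blob. The same request, sketched in Python for offline inspection (fetch_tts_audio is our name; the endpoint and parameters come from the script above, and the audio format depends on what the service returns):

import urllib.parse
import urllib.request

TTS_ENDPOINT = "https://api.streamelements.com/kappa/v2/speech"

def fetch_tts_audio(text: str, voice: str = "Brian") -> bytes:
    # Same GET request shape as generateTextToSpeechAudio() in script1.js;
    # urlopen raises HTTPError on non-2xx responses.
    params = urllib.parse.urlencode({"voice": voice, "text": text})
    with urllib.request.urlopen(f"{TTS_ENDPOINT}?{params}") as response:
        return response.read()  # raw audio bytes

if __name__ == "__main__":
    audio = fetch_tts_audio("Hello from Voicee!")
    with open("speech.out", "wb") as f:  # extension depends on the returned format
        f.write(audio)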
script2.js
CHANGED
@@ -1 +1,582 @@
-
+const startStopButton = document.getElementById('startStopButton');
+const voiceSelectionDropdown = document.getElementById('voiceSelect');
+const modelSelectionDropdown = document.getElementById('modelSelect');
+const noiseSuppressionCheckbox = document.getElementById('noiseSuppression');
+const responseTimeDisplay = document.getElementById('responseTime');
+const userActivityIndicator = document.getElementById('userIndicator');
+const aiActivityIndicator = document.getElementById('aiIndicator');
+const transcriptDiv = document.getElementById('transcript');
+
+let speechRecognizer;
+let activeQuery = null;
+let queryStartTime = 0;
+let completeTranscript = '';
+let isRequestInProgress = false;
+let isUserSpeaking = false;
+let isSpeechRecognitionActive = false;
+let requestAbortController = null;
+let partialTranscript = '';
+let lastUserSpeechTimestamp = null;
+let prefetchTextQuery = "";
+let firstResponseTextTimestamp = null;
+
+// Configuration
+const USER_SPEECH_INTERRUPT_DELAY = 500;
+const TEXT_TO_SPEECH_API_ENDPOINT = "https://api.streamelements.com/kappa/v2/speech";
+const CHUNK_SIZE = 300;
+
+// Audio Management
+let currentAudio = null;
+let audioPlaybackQueue = [];
+let prefetchQueue = [];
+
+// Enhanced Prefetching and Caching
+const prefetchCache = new Map();
+const pendingPrefetchRequests = new Map();
+const MAX_PREFETCH_REQUESTS = 10;
+const prefetchCacheExpiration = 60000; // 1 minute
+
+// Global Conversation History
+let conversationHistory = [];
+
+// Audio Caching
+const audioCache = new Map();
+const audioCacheExpiration = 3600000; // 1 hour
+
+// Normalize query text
+const normalizeQueryText = query => query.trim().toLowerCase().replace(/[^\w\s]/g, '');
+
+// Generate a cache key
+const generateCacheKey = (normalizedQuery, voice, history, modelName) =>
+  `${normalizedQuery}-${voice}-${JSON.stringify(history)}-${modelName}`;
+
+// Prefetch and cache the first TTS audio chunk
+const prefetchFirstAudioChunk = (query, voice) => {
+  const normalizedQuery = normalizeQueryText(query);
+  const cacheKey = generateCacheKey(normalizedQuery, voice, conversationHistory, modelSelectionDropdown.value);
+
+  if (pendingPrefetchRequests.has(cacheKey) || prefetchCache.has(cacheKey)) return;
+
+  prefetchQueue.push({ query: query.trim(), voice, cacheKey });
+  processPrefetchQueue();
+};
+
+// Process the prefetch queue
+const processPrefetchQueue = async () => {
+  while (prefetchQueue.length > 0 && pendingPrefetchRequests.size < MAX_PREFETCH_REQUESTS) {
+    const { query, voice, cacheKey } = prefetchQueue.shift();
+    const abortController = new AbortController();
+    pendingPrefetchRequests.set(cacheKey, abortController);
+
+    const url = '/stream_text';
+    const requestBody = {
+      query: query,
+      history: JSON.stringify(conversationHistory),
+      model: modelSelectionDropdown.value
+    };
+
+    try {
+      const response = await fetch(url, {
+        method: 'POST',
+        headers: {
+          'Accept': 'text/event-stream',
+          'Content-Type': 'application/json'
+        },
+        body: JSON.stringify(requestBody),
+        signal: abortController.signal
+      });
+
+      if (!response.ok) throw new Error('Network response was not ok');
+
+      const firstAudioUrl = await handleStreamingResponseForPrefetch(response.body, voice, abortController.signal);
+
+      if (firstAudioUrl) prefetchCache.set(cacheKey, { url: firstAudioUrl, timestamp: Date.now() });
+
+    } catch (error) {
+      if (error.name !== 'AbortError') console.error("Error prefetching audio:", error);
+    } finally {
+      pendingPrefetchRequests.delete(cacheKey);
+      processPrefetchQueue();
+    }
+  }
+};
+
+// Handle the streaming response for prefetching
+const handleStreamingResponseForPrefetch = async (responseStream, voice, abortSignal) => {
+  const reader = responseStream.getReader();
+  const decoder = new TextDecoder("utf-8");
+  let buffer = "";
+
+  try {
+    while (true) {
+      const { done, value } = await reader.read();
+      if (done) break;
+      if (abortSignal.aborted) throw new DOMException('Request aborted', 'AbortError');
+
+      const chunk = decoder.decode(value, { stream: true });
+      buffer += chunk;
+      const lines = buffer.split('\n');
+
+      for (let i = 0; i < lines.length - 1; i++) {
+        const line = lines[i];
+        if (line.startsWith('data: ')) {
+          const textContent = line.substring(6).trim();
+          if (textContent) {
+            const audioUrl = await generateTextToSpeechAudio(textContent, voice);
+            return audioUrl;
+          }
+        }
+      }
+
+      buffer = lines[lines.length - 1];
+    }
+  } catch (error) {
+    console.error("Error in handleStreamingResponseForPrefetch:", error);
+  } finally {
+    reader.releaseLock();
+  }
+
+  return null;
+};
+
+// Play audio from the queue
+const playNextAudio = async () => {
+  if (audioPlaybackQueue.length > 0) {
+    const audioData = audioPlaybackQueue.shift();
+    const audio = new Audio(audioData.url);
+    updateActivityIndicators();
+
+    // Pause speech recognition if it's active
+    if (isSpeechRecognitionActive) {
+      speechRecognizer.stop();
+      isSpeechRecognitionActive = false;
+      startStopButton.innerHTML = `
+        <svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round">
+          <path d="M12 1a3 3 0 0 0-3 3v8a3 3 0 0 0 6 0V4a3 3 0 0 0-3-3z"></path>
+          <path d="M19 10v2a7 7 0 0 1-14 0v-2"></path>
+          <line x1="12" y1="19" x2="12" y2="23"></line>
+          <line x1="8" y1="23" x2="16" y2="23"></line>
+        </svg>
+        Interrupt AI
+      `;
+    }
+
+    const audioPromise = new Promise(resolve => {
+      audio.onended = resolve;
+      audio.onerror = resolve;
+    });
+    if (currentAudio) {
+      currentAudio.pause();
+      currentAudio.currentTime = 0;
+    }
+
+    currentAudio = audio;
+    await audio.play();
+    await audioPromise;
+    playNextAudio();
+  } else {
+    updateActivityIndicators();
+
+    // Resume speech recognition if it was paused with a delay
+    setTimeout(() => {
+      if (!isSpeechRecognitionActive) {
+        speechRecognizer.start();
+        isSpeechRecognitionActive = true;
+        startStopButton.innerHTML = `
+          <svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round">
+            <path d="M9 9h6v6h-6z"></path>
+            <path d="M12 1a3 3 0 0 0-3 3v8a3 3 0 0 0 6 0V4a3 3 0 0 0-3-3z"></path>
+            <path d="M19 10v2a7 7 0 0 1-14 0v-2"></path>
+            <line x1="12" y1="19" x2="12" y2="23"></line>
+            <line x1="8" y1="23" x2="16" y2="23"></line>
+          </svg>
+          Stop Listening
+        `;
+      }
+    }, 100);
+  }
+};
+
+// Generate Text-to-Speech audio with caching
+const generateTextToSpeechAudio = async (text, voice) => {
+  const normalizedText = normalizeQueryText(text);
+  const cacheKey = `${normalizedText}-${voice}`;
+
+  if (audioCache.has(cacheKey)) {
+    const cachedData = audioCache.get(cacheKey);
+    if (Date.now() - cachedData.timestamp < audioCacheExpiration) {
+      return cachedData.url;
+    } else {
+      audioCache.delete(cacheKey);
+    }
+  }
+
+  try {
+    const response = await fetch(`${TEXT_TO_SPEECH_API_ENDPOINT}?voice=${voice}&text=${encodeURIComponent(text)}`, { method: 'GET' });
+    if (!response.ok) throw new Error('Network response was not ok');
+    const audioBlob = await response.blob();
+    const audioUrl = URL.createObjectURL(audioBlob);
+
+    audioCache.set(cacheKey, { url: audioUrl, timestamp: Date.now() });
+    return audioUrl;
+  } catch (error) {
+    console.error("Error generating TTS audio:", error);
+    return null;
+  }
+};
+
+// Send a query to the AI
+const sendQueryToAI = async (query) => {
+  console.log("Sending query to AI:", query);
+  isRequestInProgress = true;
+  updateActivityIndicators();
+  queryStartTime = Date.now();
+  firstResponseTextTimestamp = null;
+
+  const normalizedQuery = normalizeQueryText(query);
+  const cacheKey = generateCacheKey(normalizedQuery, voiceSelectionDropdown.value, conversationHistory, modelSelectionDropdown.value);
|
238 |
+
|
239 |
+
if (prefetchCache.has(cacheKey)) {
|
240 |
+
const cachedData = prefetchCache.get(cacheKey);
|
241 |
+
if (Date.now() - cachedData.timestamp < prefetchCacheExpiration) {
|
242 |
+
const prefetchedAudioUrl = cachedData.url;
|
243 |
+
audioPlaybackQueue.push({ url: prefetchedAudioUrl, isPrefetched: true });
|
244 |
+
playNextAudio();
|
245 |
+
} else {
|
246 |
+
prefetchCache.delete(cacheKey);
|
247 |
+
}
|
248 |
+
}
|
249 |
+
|
250 |
+
requestAbortController = new AbortController();
|
251 |
+
|
252 |
+
const url = '/stream_text';
|
253 |
+
const requestBody = {
|
254 |
+
query: query,
|
255 |
+
history: JSON.stringify(conversationHistory),
|
256 |
+
model: modelSelectionDropdown.value
|
257 |
+
};
|
258 |
+
|
259 |
+
try {
|
260 |
+
const response = await fetch(url, {
|
261 |
+
method: 'POST',
|
262 |
+
headers: {
|
263 |
+
'Accept': 'text/event-stream',
|
264 |
+
'Content-Type': 'application/json'
|
265 |
+
},
|
266 |
+
body: JSON.stringify(requestBody),
|
267 |
+
signal: requestAbortController.signal
|
268 |
+
});
|
269 |
+
|
270 |
+
if (!response.ok) {
|
271 |
+
if (response.status === 429) {
|
272 |
+
console.log("Rate limit hit, retrying in 1 second...");
|
273 |
+
await new Promise(resolve => setTimeout(resolve, 1000));
|
274 |
+
await sendQueryToAI(query);
|
275 |
+
return;
|
276 |
+
}
|
277 |
+
throw new Error(`Network response was not ok: ${response.status}`);
|
278 |
+
}
|
279 |
+
|
280 |
+
console.log("Streaming audio response received");
|
281 |
+
await handleStreamingResponse(response.body, voiceSelectionDropdown.value, requestAbortController.signal);
|
282 |
+
} catch (error) {
|
283 |
+
if (error.name !== 'AbortError') {
|
284 |
+
console.error("Error sending query to AI:", error);
|
285 |
+
}
|
286 |
+
} finally {
|
287 |
+
isRequestInProgress = false;
|
288 |
+
updateActivityIndicators();
|
289 |
+
}
|
290 |
+
};
|
291 |
+
|
// Handle the streaming audio response
const handleStreamingResponse = async (responseStream, voice, abortSignal) => {
  const reader = responseStream.getReader();
  const decoder = new TextDecoder("utf-8");
  let buffer = "";
  let initialChunksSent = 0;
  let fullResponseText = "";
  let textChunk = "";
  let sentText = "";

  try {
    while (true) {
      const { done, value } = await reader.read();
      if (done) break;
      if (abortSignal.aborted) throw new DOMException('Request aborted', 'AbortError');

      if (isUserSpeaking) {
        interruptAudioPlayback('user is speaking');
        break;
      }

      const chunk = decoder.decode(value, { stream: true });
      buffer += chunk;
      const lines = buffer.split('\n');

      for (let i = 0; i < lines.length - 1; i++) {
        const line = lines[i];
        if (line.startsWith('data: ')) {
          const textContent = line.substring(6).trim();
          if (textContent) {
            if (!firstResponseTextTimestamp) firstResponseTextTimestamp = Date.now();

            fullResponseText += textContent + " ";
            textChunk += textContent + " ";
            transcriptDiv.textContent = fullResponseText; // Update transcriptDiv

            if (initialChunksSent < 2) {
              const audioUrl = await generateTextToSpeechAudio(textContent, voice);
              if (audioUrl) {
                audioPlaybackQueue.push({ url: audioUrl, isPrefetched: false });
                if (!currentAudio) playNextAudio();
              }
              sentText += textContent + " ";
              initialChunksSent++;
            } else {
              let unsentTextChunk = textChunk.replace(sentText, '').trim();

              if (unsentTextChunk.length >= CHUNK_SIZE) {
                const audioUrl = await generateTextToSpeechAudio(unsentTextChunk, voice);
                if (audioUrl) {
                  audioPlaybackQueue.push({ url: audioUrl, isPrefetched: false });
                  if (!currentAudio) playNextAudio();
                }
                textChunk = "";
              }
            }
          }
        }
      }

      buffer = lines[lines.length - 1];
    }
  } catch (error) {
    console.error("Error in handleStreamingResponse:", error);
  } finally {
    reader.releaseLock();

    let unsentTextChunk = textChunk.replace(sentText, '').trim();
    if (unsentTextChunk !== "") {
      const audioUrl = await generateTextToSpeechAudio(unsentTextChunk, voice);
      if (audioUrl) {
        audioPlaybackQueue.push({ url: audioUrl, isPrefetched: false });
        if (!currentAudio) playNextAudio();
      }
    }

    if (fullResponseText !== '') {
      addToConversationHistory('assistant', fullResponseText);
      fullResponseText = ''; // Clear fullResponseText for the next response
    }
  }
};

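// Editor's note: handleStreamingResponse keeps the last (possibly partial)
// line in `buffer` across reads, so an SSE event split between two network
// chunks is never lost. A standalone sketch of that buffering idea
// (hypothetical helper, not used by the script):
const splitSSEBuffer = (buffer) => {
  const lines = buffer.split('\n');
  const rest = lines.pop(); // incomplete trailing line, carried into the next chunk
  const events = lines
    .filter(line => line.startsWith('data: '))
    .map(line => line.substring(6).trim())
    .filter(Boolean);
  return { events, rest };
};
// Example: splitSSEBuffer('data: hello\ndata: wor') yields
// { events: ['hello'], rest: 'data: wor' }.
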
// Update activity indicators
const updateActivityIndicators = (state = null) => {
  userActivityIndicator.textContent = isUserSpeaking ? "User: Speaking" : "User: Idle";
  userActivityIndicator.className = isUserSpeaking
    ? "indicator rounded-full px-4 py-2 text-white flex items-center transition-colors duration-300 bg-gradient-to-r from-blue-400 to-blue-600 hover:bg-gradient-to-r from-blue-500 to-blue-700"
    : "indicator rounded-full px-4 py-2 text-white flex items-center transition-colors duration-300 bg-gradient-to-r from-gray-300 to-gray-400 dark:from-gray-700 dark:to-gray-800 hover:bg-gradient-to-r from-gray-400 to-gray-500"; // Tailwind classes

  if (isRequestInProgress && !currentAudio) {
    aiActivityIndicator.textContent = "AI: Processing...";
    aiActivityIndicator.className = "indicator rounded-full px-4 py-2 text-white flex items-center transition-colors duration-300 bg-gradient-to-r from-purple-400 to-purple-600 hover:bg-gradient-to-r from-purple-500 to-purple-700"; // Tailwind class for thinking
  } else if (currentAudio && !isUserSpeaking) {
    aiActivityIndicator.textContent = state || "AI: Speaking";
    aiActivityIndicator.className = "indicator rounded-full px-4 py-2 text-white flex items-center transition-colors duration-300 bg-gradient-to-r from-green-400 to-green-600 hover:bg-gradient-to-r from-green-500 to-green-700"; // Tailwind class for speaking
  } else if (isUserSpeaking) {
    aiActivityIndicator.textContent = "AI: Listening";
    aiActivityIndicator.className = "indicator rounded-full px-4 py-2 text-white flex items-center transition-colors duration-300 bg-gradient-to-r from-yellow-400 to-yellow-600 hover:bg-gradient-to-r from-yellow-500 to-yellow-700"; // Tailwind class for listening
  } else {
    aiActivityIndicator.textContent = "AI: Idle";
    aiActivityIndicator.className = "indicator rounded-full px-4 py-2 text-white flex items-center transition-colors duration-300 bg-gradient-to-r from-gray-300 to-gray-400 dark:from-gray-700 dark:to-gray-800 hover:bg-gradient-to-r from-gray-400 to-gray-500"; // Tailwind classes
  }
};

// Initialize speech recognition
if ('webkitSpeechRecognition' in window) {
  speechRecognizer = new webkitSpeechRecognition();
  Object.assign(speechRecognizer, {
    continuous: true,
    interimResults: true,
    lang: 'en-US', // the SpeechRecognition property is `lang`; `language` is silently ignored
    maxAlternatives: 3
  });

  speechRecognizer.onstart = () => {
    console.log("Speech recognition started");
    completeTranscript = '';
    isUserSpeaking = true;
    lastUserSpeechTimestamp = Date.now();
    updateActivityIndicators();
    startStopButton.innerHTML = `
      <svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round">
        <path d="M9 9h6v6h-6z"></path>
        <path d="M12 1a3 3 0 0 0-3 3v8a3 3 0 0 0 6 0V4a3 3 0 0 0-3-3z"></path>
        <path d="M19 10v2a7 7 0 0 1-14 0v-2"></path>
        <line x1="12" y1="19" x2="12" y2="23"></line>
        <line x1="8" y1="23" x2="16" y2="23"></line>
      </svg>
      Stop Listening
    `;
  };

  speechRecognizer.onresult = (event) => {
    let interimTranscript = '';
    for (let i = event.resultIndex; i < event.results.length; i++) {
      const transcript = event.results[i][0].transcript;
      if (event.results[i].isFinal) {
        completeTranscript += transcript;
        interruptAudioPlayback('final');
        processSpeechTranscript(completeTranscript);
        completeTranscript = '';
        isUserSpeaking = false;
        updateActivityIndicators();
        queryStartTime = Date.now();
      } else {
        interimTranscript += transcript;
        isUserSpeaking = true;
        lastUserSpeechTimestamp = Date.now();
        updateActivityIndicators();

        if (interimTranscript.length > prefetchTextQuery.length + 5) {
          cancelPrefetchRequests(prefetchTextQuery);
        }
        prefetchTextQuery = interimTranscript;
        prefetchFirstAudioChunk(interimTranscript, voiceSelectionDropdown.value);
      }
    }
  };

  speechRecognizer.onerror = (event) => {
    console.error('Speech recognition error:', event.error);
    if (isSpeechRecognitionActive) speechRecognizer.start();
  };

  speechRecognizer.onend = () => {
    isUserSpeaking = false;
    updateActivityIndicators();

    if (!isRequestInProgress && completeTranscript !== '') {
      processSpeechTranscript(completeTranscript);
      completeTranscript = '';
    }

    if (isSpeechRecognitionActive) speechRecognizer.start();
  };

  startStopButton.addEventListener('click', () => {
    if (isSpeechRecognitionActive && !isRequestInProgress) { // Stop Listening
      speechRecognizer.stop();
      isSpeechRecognitionActive = false;
      startStopButton.innerHTML = `
        <svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round">
          <path d="M12 1a3 3 0 0 0-3 3v8a3 3 0 0 0 6 0V4a3 3 0 0 0-3-3z"></path>
          <path d="M19 10v2a7 7 0 0 1-14 0v-2"></path>
          <line x1="12" y1="19" x2="12" y2="23"></line>
          <line x1="8" y1="23" x2="16" y2="23"></line>
        </svg>
        Start Listening
      `;
    } else if ((isSpeechRecognitionActive && isRequestInProgress) || currentAudio) { // Interrupt AI
      interruptAudioPlayback('button interrupt');
      speechRecognizer.start();
      isSpeechRecognitionActive = true; // Keep recognition active
      startStopButton.innerHTML = `
        <svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round">
          <path d="M12 1a3 3 0 0 0-3 3v8a3 3 0 0 0 6 0V4a3 3 0 0 0-3-3z"></path>
          <path d="M19 10v2a7 7 0 0 1-14 0v-2"></path>
          <line x1="12" y1="19" x2="12" y2="23"></line>
          <line x1="8" y1="23" x2="16" y2="23"></line>
        </svg>
        Interrupt AI
      `; // Replace with your SVG
    } else { // Start Listening
      speechRecognizer.start();
      isSpeechRecognitionActive = true;
      startStopButton.innerHTML = `
        <svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round">
          <path d="M9 9h6v6h-6z"></path>
          <path d="M12 1a3 3 0 0 0-3 3v8a3 3 0 0 0 6 0V4a3 3 0 0 0-3-3z"></path>
          <path d="M19 10v2a7 7 0 0 1-14 0v-2"></path>
          <line x1="12" y1="19" x2="12" y2="23"></line>
          <line x1="8" y1="23" x2="16" y2="23"></line>
        </svg>
        Stop Listening
      `; // Replace with your SVG
    }
  });
} else {
  alert('Your browser does not support the Web Speech API.');
}

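// Editor's note: the block above only checks for the prefixed
// webkitSpeechRecognition constructor. A slightly more defensive feature check
// (a sketch, not in the original) would also accept the unprefixed
// constructor where a browser exposes it:
//   const SpeechRecognitionCtor = window.SpeechRecognition || window.webkitSpeechRecognition;
//   if (SpeechRecognitionCtor) speechRecognizer = new SpeechRecognitionCtor();
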
// Add to conversation history
const addToConversationHistory = (role, content) => {
  if (conversationHistory.length > 0 &&
      conversationHistory[conversationHistory.length - 1].role === 'assistant' &&
      conversationHistory[conversationHistory.length - 1].content === "") {
    conversationHistory.pop();
  }

  conversationHistory.push({ role, content });

  if (conversationHistory.length > 6) conversationHistory.splice(0, 2);
};

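// Editor's note: with the cap above, pushing a 7th message trims the two
// oldest entries, so the history keeps at most the last three user/assistant
// exchanges. Worked example (sketch):
//   history = [u1, a1, u2, a2, u3, a3]        // length 6
//   addToConversationHistory('user', 'u4');   // push -> length 7 > 6
//                                             // splice(0, 2) drops u1 and a1
//   // history is now [u2, a2, u3, a3, u4]    // length 5
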
// Process the final speech transcript
const processSpeechTranscript = (transcript) => {
  const trimmedTranscript = transcript.trimStart();
  if (trimmedTranscript !== '' && !isRequestInProgress) {
    activeQuery = trimmedTranscript;
    sendQueryToAI(activeQuery);
    addToConversationHistory('user', activeQuery);
    transcriptDiv.textContent = '';
  }
};

// Interrupt audio playback
const interruptAudioPlayback = (reason = 'unknown') => {
  console.log(`Interrupting audio (reason: ${reason})...`);
  if (currentAudio) {
    currentAudio.pause();
    currentAudio.currentTime = 0;
    currentAudio = null;
  }

  audioPlaybackQueue.length = 0;
  isRequestInProgress = false;

  if (requestAbortController) {
    requestAbortController.abort();
    requestAbortController = null;
  }

  prefetchCache.clear();
  prefetchQueue.length = 0;
  updateActivityIndicators();
};

// Cancel pending prefetch requests
const cancelPrefetchRequests = (query) => {
  const normalizedQuery = normalizeQueryText(query);

  for (const [cacheKey, abortController] of pendingPrefetchRequests) {
    if (cacheKey.startsWith(normalizedQuery)) {
      abortController.abort();
      pendingPrefetchRequests.delete(cacheKey);
    }
  }
};

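// Editor's note: cancelPrefetchRequests assumes each in-flight prefetch has
// registered its AbortController in pendingPrefetchRequests under a key that
// starts with the normalized query text. A hypothetical registration sketch
// (the real prefetch code lives earlier in this file; names below are
// illustrative):
//   const cacheKey = normalizeQueryText(query) + '|' + voice;
//   const abortController = new AbortController();
//   pendingPrefetchRequests.set(cacheKey, abortController);
//   fetch(ttsUrl, { signal: abortController.signal })
//     .finally(() => pendingPrefetchRequests.delete(cacheKey));
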
// Update latency display
const updateLatency = () => {
  if (firstResponseTextTimestamp) {
    const latency = firstResponseTextTimestamp - queryStartTime;
    responseTimeDisplay.textContent = `Latency: ${latency}ms`;
  } else {
    responseTimeDisplay.textContent = "Latency: 0ms";
  }
};

setInterval(updateLatency, 200);
secure.js
DELETED
@@ -1 +0,0 @@
(function(a,b){"object"==typeof exports&&"undefined"!=typeof module?module.exports=b():"function"==typeof define&&define.amd?define(b):(a="undefined"==typeof globalThis?a||self:globalThis,a.DisableDevtool=b())})(this,function(){"use strict";function a(){if(p.redirectUrl)window.location.href=p.redirectUrl;else if(p.replacementHTML)try{document.documentElement.innerHTML=p.replacementHTML}catch(a){document.documentElement.innerText=p.replacementHTML}else{try{window.opener=null,window.open("","_self"),window.close(),window.history.back()}catch(a){console.log(a)}setTimeout(function(){window.location.href=p.timeoutRedirectUrl||`https://theajack.github.io/disable-devtool/404.html?h=${encodeURIComponent(location.host)}`},500)}}function b(a={}){for(const b in p)void 0!==a[b]&&(s(p[b])===s(a[b])||-1!==q.indexOf(b))&&(p[b]=a[b]);"function"==typeof p.onDevToolClose&&!0===p.stopIntervalOnOpen&&(p.stopIntervalOnOpen=!1,console.warn("stopIntervalOnOpen is invalid when using onDevToolClose"))}function c(){const a=navigator.userAgent.toLowerCase(),b="number"==typeof navigator.maxTouchPoints?1<navigator.maxTouchPoints:/android|iphone|ipad|ipod|arch/i.test(navigator.platform.toLowerCase())||/(iphone|ipad|ipod|ios|android)/i.test(a),c=!!window.top&&window!==window.top,d=a.includes("qqbrowser"),e=a.includes("firefox"),f=a.includes("macintosh"),g=a.includes("edge"),h=g&&!a.includes("chrome"),i=h||a.includes("trident")||a.includes("msie"),j=a.includes("crios"),k=a.includes("edgios"),l=a.includes("chrome")||j,m=!b&&/(googlebot|baiduspider|bingbot|applebot|petalbot|yandexbot|bytespider|chrome\-lighthouse|moto g power)/i.test(a);Object.assign(r,{isWithinIframe:c,isPC:!b,isQQBrowser:d,isFirefox:e,isMacOS:f,isEdge:g,isLegacyEdge:h,isIE:i,isIOSChrome:j,isIOSEdge:k,isChrome:l,isSearchBot:m,isMobile:b})}function d(){const a={};for(let b=0;500>b;b++)a[`${b}`]=`${b}`;const b=[];for(let c=0;50>c;c++)b.push(a);return b}function e(){p.clearConsoleOnInit&&x()}function f(a){z[a]=!1}function g(){for(const a in z)if(z[a])return y=!0;return y=!1}function h(){const a=p.ignoreUrlPatterns;if(a){if("function"==typeof a)return a();if(0!==a.length){const b=location.href;if(A===b)return B;A=b;for(const c of a)if("string"==typeof c&&b.includes(c)||c instanceof RegExp&&c.test(b))return B=!0}}return B=!1}function i(a){const b=r.isMacOS,c=b?a=>a.metaKey&&a.altKey&&(73===a.keyCode||105===a.keyCode):a=>a.ctrlKey&&a.shiftKey&&(73===a.keyCode||105===a.keyCode),d=b?a=>a.metaKey&&a.altKey&&85===a.keyCode||a.metaKey&&83===a.keyCode:a=>a.ctrlKey&&(83===a.keyCode||85===a.keyCode);a.addEventListener("keydown",function(b){const e=b.keyCode||b.which;if(123===e||c(b)||d(b))return l(a,b)},!0),p.disableRightClick&&a.addEventListener("contextmenu",function(b){if("touch"!==b.pointerType)return l(a,b)},!1),p.disableTextSelect&&j(a,"selectstart"),p.disableCopy&&j(a,"copy"),p.disableCut&&j(a,"cut"),p.disablePaste&&j(a,"paste")}function j(a,b){a.addEventListener(b,k)}function k(a){if(!h()&&!K.isDetectionSuspended)return a.preventDefault(),!1}function l(a,b){if(!h()&&!K.isDetectionSuspended)return(b=b||a.event).returnValue=!1,b.preventDefault(),!1}function m(){H=setInterval(()=>{if(!(K.isDetectionSuspended||h())){for(const a of F)f(a.type),a.detect(J++);if(e(),"function"==typeof p.onDevToolClose){const a=y;!g()&&a&&p.onDevToolClose()}}},p.checkInterval)}function n(){clearInterval(H)}function o(a){let b=0;for(let c=0;c<a.length;c++)b=(b<<5)-b+a.charCodeAt(c),b|=0;return b.toString(16)}const 
p={onDevToolOpen:a,onDevToolClose:null,redirectUrl:"",timeoutRedirectUrl:"",urlTokenParam:"ddtk",checkInterval:500,disableRightClick:!0,stopCheckAfter:5e3,stopIntervalOnOpen:!1,enabledDetectors:[0,1,3,4,5,6,7],clearConsoleOnInit:!0,disableTextSelect:!1,disableCopy:!1,disableCut:!1,disablePaste:!1,ignoreUrlPatterns:null,disableDevToolsInParentFrames:!1,detectSearchEngineBots:!1,replacementHTML:""},q=["enabledDetectors","onDevToolClose","ignoreUrlPatterns"],r={isWithinIframe:!1,isPC:!1,isQQBrowser:!1,isFirefox:!1,isMacOS:!1,isEdge:!1,isLegacyEdge:!1,isIE:!1,isIOSChrome:!1,isIOSEdge:!1,isChrome:!1,isSearchBot:!1,isMobile:!1},s=a=>"object"==typeof a&&null!==a,t=()=>new Date().getTime(),u=a=>{const b=t();return a(),t()-b};let v=console.log,w=console.table,x=console.clear;r.isIE&&(v=(...a)=>console.log(...a),w=(...a)=>console.table(...a),x=()=>console.clear());let y=!1;const z={};let A="",B=!1;const C={RegexToString:0,DefinePropertyID:1,WindowSize:2,DateToString:3,FunctionToString:4,DebuggerStatement:5,Performance:6,ExternalLibrary:7};class D{constructor({type:a,isEnabled:b=!0}){this.type=C.Unknown,this.isEnabled=!0,this.type=a,this.isEnabled=b,this.isEnabled&&(F.push(this),this.init())}onDevToolOpen(){console.warn(`DevTool usage is prohibited! [Detector: ${this.type}]`),p.stopIntervalOnOpen&&n(),clearTimeout(I),p.onDevToolOpen(this.type,a),z[this.type]=!0}init(){}detect(){}}class E extends D{constructor(){super({type:C.ExternalLibrary})}init(){}detect(){(window.eruda&&window.eruda._devTools&&window.eruda._devTools._isShow||window._vcOrigConsole&&window.document.querySelector("#__vconsole.vc-toggle"))&&this.onDevToolOpen()}static isExternalLibraryInUse(){return!!window.eruda||!!window._vcOrigConsole}}const F=[],G={[C.RegexToString]:class a extends D{constructor(){super({type:C.RegexToString,isEnabled:r.isQQBrowser||r.isFirefox})}init(){this.lastDetection=0,this.regex=/./,v(this.regex),this.regex.toString=()=>{if(r.isQQBrowser){const a=t();this.lastDetection&&100>a-this.lastDetection?this.onDevToolOpen():this.lastDetection=a}else r.isFirefox&&this.onDevToolOpen();return""}}detect(){v(this.regex)}},[C.DefinePropertyID]:class a extends D{constructor(){super({type:C.DefinePropertyID})}init(){this.div=document.createElement("div"),this.div.__defineGetter__("id",()=>this.onDevToolOpen()),Object.defineProperty(this.div,"id",{get:()=>this.onDevToolOpen()})}detect(){v(this.div)}},[C.WindowSize]:class a extends D{constructor(){super({type:C.WindowSize,isEnabled:!r.isWithinIframe&&!r.isEdge})}init(){this.checkWindowSize(),window.addEventListener("resize",()=>{setTimeout(()=>this.checkWindowSize(),100)},!0)}detect(){}checkWindowSize(){const a=window.devicePixelRatio||window.screen&&window.screen.deviceXDPI&&window.screen.logicalXDPI&&window.screen.deviceXDPI/window.screen.logicalXDPI;if(!1!==a){const b=200<window.outerWidth-window.innerWidth*a,c=300<window.outerHeight-window.innerHeight*a;if(b||c)return this.onDevToolOpen(),!1;f(this.type)}return!0}},[C.DateToString]:class a extends D{constructor(){super({type:C.DateToString,isEnabled:!r.isIOSChrome&&!r.isIOSEdge})}init(){this.modifyCount=0,this.date=new Date,this.date.toString=()=>(this.modifyCount++,"")}detect(){this.modifyCount=0,v(this.date),e(),2<=this.modifyCount&&this.onDevToolOpen()}},[C.FunctionToString]:class a extends 
D{constructor(){super({type:C.FunctionToString,isEnabled:!r.isIOSChrome&&!r.isIOSEdge})}init(){this.modifyCount=0,this.func=function(){},this.func.toString=()=>(this.modifyCount++,"")}detect(){this.modifyCount=0,v(this.func),e(),2<=this.modifyCount&&this.onDevToolOpen()}},[C.DebuggerStatement]:class a extends D{constructor(){super({type:C.DebuggerStatement,isEnabled:r.isIOSChrome||r.isIOSEdge})}detect(){const a=t();debugger;100<t()-a&&this.onDevToolOpen()}},[C.Performance]:class a extends D{constructor(){super({type:C.Performance,isEnabled:r.isChrome||!r.isMobile})}init(){this.maxPrintTime=0,this.largeArray=d()}detect(){const a=u(()=>w(this.largeArray)),b=u(()=>v(this.largeArray));return this.maxPrintTime=Math.max(this.maxPrintTime,b),e(),0!==a&&0!==this.maxPrintTime&&void(a>10*this.maxPrintTime&&this.onDevToolOpen())}},[C.ExternalLibrary]:E};let H=0,I=0,J=0;const K=Object.assign(function(a={}){function d(a=""){return{success:!a,reason:a}}if(K.isRunning)return d("already running");c(),x=r.isIE?console.clear:console.clear,b(a);const e=new URLSearchParams(window.location.search).get(p.urlTokenParam);if(e&&o(e)===o(window.location.href))return d("token passed");if(p.detectSearchEngineBots&&r.isSearchBot)return d("search bot");K.isRunning=!0,m();const f=window.top,g=window.parent;if(p.disableDevToolsInParentFrames&&f&&g&&f!==window){for(let a=window;a!==f;)i(a),a=a.parent;i(f)}i(window);const h="all"===p.enabledDetectors?Object.keys(G):p.enabledDetectors;return h.forEach(a=>{new G[a]}),I=setTimeout(function(){r.isPC||E.isExternalLibraryInUse()||n()},p.stopCheckAfter),d()},{isRunning:!1,isDetectionSuspended:!1,calculateMD5:o,version:"0.0.1",DetectorType:C,isDevToolOpened:g}),L=function(){if("undefined"==typeof window||!window.document)return null;const a=document.querySelector("[secura-auto]");if(!a)return null;const b=["disable-menu","disable-select","disable-copy","disable-cut","disable-paste","clear-log"],c=["interval"],d={};["md5","url","tk-name","detectors",...b,...c].forEach(e=>{const f=a.getAttribute(e);if(null!==f){const a=e.split("-").map((a,b)=>0===b?a:a.charAt(0).toUpperCase()+a.slice(1)).join("");d[a]=c.includes(e)?parseInt(f):b.includes(e)?"false"!==f:"detectors"===e&&"all"!==f?f.split(" "):f}});const e={md5:"urlToken",url:"redirectUrl","tk-name":"urlTokenParam",disableMenu:"disableRightClick",disableSelect:"disableTextSelect",clearLog:"clearConsoleOnInit",interval:"checkInterval"};for(const a in e)void 0!==d[a]&&(d[e[a]]=d[a],delete d[a]);return d}();return L&&K(L),K});