Update index.js
index.js (changed)
@@ -36,7 +36,9 @@ const CONFIG = {
         "grok-3": "grok-3",
         "grok-3-deepsearch": "grok-3",
         "grok-3-reasoning": "grok-3",
-    }
+    },
+    IS_IMG_GEN: false,
+    IS_THINKING: false
 };
 
 // HTTP request header configuration
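This hunk moves the two per-stream state flags onto the shared CONFIG object. For orientation, the sketch below shows roughly how CONFIG looks after the change: only the MODELS map and the two new flags appear in the diff itself, while the API and SERVER fields are inferred from the references to CONFIG.API.API_KEY and CONFIG.SERVER.BODY_LIMIT further down, so their values here are placeholders.

```js
// Rough shape of CONFIG after this commit. API and SERVER are assumed from
// usages elsewhere in the file; only MODELS, IS_IMG_GEN and IS_THINKING are
// visible in the diff.
const CONFIG = {
    API: {
        API_KEY: process.env.API_KEY,   // placeholder: the real source is not shown
    },
    SERVER: {
        BODY_LIMIT: '5mb',              // placeholder: the real value is not shown
    },
    MODELS: {
        "grok-3": "grok-3",
        "grok-3-deepsearch": "grok-3",
        "grok-3-reasoning": "grok-3",
    },
    IS_IMG_GEN: false,    // new: set once the upstream stream announces image generation
    IS_THINKING: false    // new: tracks whether a grok-3-reasoning stream is inside <think>
};
```

Because the flags live on a module-level object rather than in per-request scope, they are reset at the start of every streamed response (see the hunk at line 298 below), which appears to assume that only one response is processed at a time.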
@@ -296,8 +298,8 @@ class ResponseHandler {
 
         const reader = response.body;
         let buffer = '';
-
-
+        CONFIG.IS_IMG_GEN = false;
+        CONFIG.IS_THINKING = false;
 
         try {
             for await (const chunk of reader) {
@@ -306,7 +308,7 @@ class ResponseHandler {
 
             for (const line of lines) {
                 if (!line.trim()) continue;
-                await this.processStreamLine(JSON.parse(line), model, res
+                await this.processStreamLine(JSON.parse(line), model, res);
             }
         }
 
@@ -318,19 +320,19 @@ class ResponseHandler {
         }
     }
 
-    static async processStreamLine(jsonData, model, res
+    static async processStreamLine(jsonData, model, res) {
         if (jsonData.result?.doImgGen) {
-
+            CONFIG.IS_IMG_GEN = true;
             return;
         }
 
-        if (
+        if (CONFIG.IS_IMG_GEN && jsonData.result?.event?.imageAttachmentUpdate?.progress === 100) {
             await this.handleImageGeneration(jsonData, model, res);
             return;
         }
 
-        if (!
-        await this.handleTextMessage(jsonData, model, res
+        if (!CONFIG.IS_IMG_GEN && jsonData.result?.message) {
+            await this.handleTextMessage(jsonData, model, res);
         }
     }
 
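The rewritten processStreamLine distinguishes three kinds of parsed stream lines. The field names below come straight from the checks in this hunk; the concrete values are invented for illustration, and the real Grok payloads presumably carry more fields.

```js
// Example parsed lines and the branch each one takes in processStreamLine.

const imgGenAnnounced = { result: { doImgGen: true } };
// -> sets CONFIG.IS_IMG_GEN = true and returns without writing anything to the client

const imgGenFinished = { result: { event: { imageAttachmentUpdate: { progress: 100 } } } };
// -> once IS_IMG_GEN is set, handed to handleImageGeneration(jsonData, model, res)

const textDelta = { result: { message: "partial text", isThinking: false } };
// -> while IS_IMG_GEN stays false, handed to handleTextMessage(jsonData, model, res)
```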
@@ -354,17 +356,17 @@ class ResponseHandler {
         res.write(`data: ${JSON.stringify(responseData)}\n\n`);
     }
 
-    static async handleTextMessage(jsonData, model, res
+    static async handleTextMessage(jsonData, model, res) {
         let message = jsonData.result.message;
 
         switch (model) {
             case "grok-3-reasoning":
-                if (!
+                if (!CONFIG.IS_THINKING && jsonData.result?.isThinking) {
                     message = "<think>" + message;
-
-                } else if (
+                    CONFIG.IS_THINKING = true;
+                } else if (CONFIG.IS_THINKING && !jsonData.result?.isThinking) {
                     message = "</think>" + message;
-
+                    CONFIG.IS_THINKING = false;
                 }
                 break;
             case "grok-3-deepsearch":
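For "grok-3-reasoning", handleTextMessage wraps the reasoning part of the stream in <think> tags by watching the isThinking field flip. The standalone sketch below restates the same edge-triggered logic outside the class, with made-up chunks, to show the intended output shape.

```js
// Mirrors the flag logic from handleTextMessage: open <think> when the stream
// switches into thinking, close it when it switches back to normal output.
function wrapThinking(chunks) {
    let thinking = false;
    return chunks.map(({ message, isThinking }) => {
        let out = message;
        if (!thinking && isThinking) {
            out = "<think>" + out;
            thinking = true;
        } else if (thinking && !isThinking) {
            out = "</think>" + out;
            thinking = false;
        }
        return out;
    }).join('');
}

wrapThinking([
    { message: "Let me check the options.", isThinking: true },
    { message: " Comparing them.", isThinking: true },
    { message: "The answer is 42.", isThinking: false },
]);
// => "<think>Let me check the options. Comparing them.</think>The answer is 42."
```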
@@ -381,7 +383,6 @@ class ResponseHandler {
         let buffer = '';
         let fullResponse = '';
         let imageUrl = null;
-        let isThinking = false;
 
         try {
             for await (const chunk of reader) {
@@ -390,10 +391,10 @@ class ResponseHandler {
 
             for (const line of lines) {
                 if (!line.trim()) continue;
-                const result = await this.processNormalLine(JSON.parse(line), model,
+                const result = await this.processNormalLine(JSON.parse(line), model, CONFIG.IS_THINKING);
                 fullResponse += result.text || '';
                 imageUrl = result.imageUrl || imageUrl;
-
+                CONFIG.IS_THINKING = result.isThinking;
             }
         }
 
@@ -480,7 +481,7 @@ app.use(express.json({ limit: CONFIG.SERVER.BODY_LIMIT }));
 app.use(express.urlencoded({ extended: true, limit: CONFIG.SERVER.BODY_LIMIT }));
 
 // API routes
-app.get('/hf/v1/models', (req, res) => {
+app.get('/v1/models', (req, res) => {
     res.json({
         object: "list",
         data: Object.keys(CONFIG.MODELS).map(model => ({
@@ -492,7 +493,7 @@ app.get('/hf/v1/models', (req, res) => {
     });
 });
 
-app.post('/
+app.post('/v1/chat/completions', async (req, res) => {
     try {
         const authToken = req.headers.authorization?.replace('Bearer ', '');
         if (authToken !== CONFIG.API.API_KEY) {
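With this change the model-listing endpoint moves from /hf/v1/models to /v1/models, and chat completions are served at /v1/chat/completions. A hedged client sketch follows: the Bearer check and the model names are in the diff, while the request-body fields follow the OpenAI-style chat format that the path implies but the diff does not show, so treat them as assumptions; host, port and key are placeholders.

```js
const BASE_URL = "http://localhost:3000";   // placeholder host and port
const API_KEY = "your-api-key";             // must equal CONFIG.API.API_KEY on the server

// List the configured models (grok-3, grok-3-deepsearch, grok-3-reasoning).
const models = await fetch(`${BASE_URL}/v1/models`).then(r => r.json());
console.log(models);

// Request a streamed completion; the server answers with SSE frames of the
// form `data: {...}\n\n`, as written by ResponseHandler above.
const resp = await fetch(`${BASE_URL}/v1/chat/completions`, {
    method: "POST",
    headers: {
        "Content-Type": "application/json",
        "Authorization": `Bearer ${API_KEY}`,
    },
    body: JSON.stringify({
        model: "grok-3-reasoning",                          // assumed OpenAI-style fields
        messages: [{ role: "user", content: "Hello" }],
        stream: true,
    }),
});
```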