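/**
 * Minimal OpenAI-compatible HTTP proxy in front of Gemini.
 * Exposes /v1/models and /v1/chat/completions (including SSE streaming) and
 * forwards requests through the local chat wrapper via the request/response
 * mappers imported below.
 */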
import http from 'http';
import { sendChat, sendChatStream } from './chatwrapper';
import { mapRequest, mapResponse, mapStreamChunk } from './mapper';

/* ── basic config ─────────────────────────────────────────────────── */
const PORT = Number(process.env.PORT ?? 11434);

/* ── CORS helper ──────────────────────────────────────────────────── */
function allowCors(res: http.ServerResponse) {
  res.setHeader('Access-Control-Allow-Origin', '*');
  res.setHeader('Access-Control-Allow-Headers', '*');
  res.setHeader('Access-Control-Allow-Methods', 'GET,POST,OPTIONS');
}
/* ── JSON body helper ─────────────────────────────────────────────── */
function readJSON(
  req: http.IncomingMessage,
  res: http.ServerResponse,
): Promise<any | null> {
  return new Promise((resolve) => {
    let data = '';
    req.on('data', (c) => (data += c));
    req.on('end', () => {
      try {
        resolve(data ? JSON.parse(data) : {});
      } catch {
        res.writeHead(400).end(); // malformed JSON
        resolve(null);
      }
    });
  });
}
/* ── server ───────────────────────────────────────────────────────── */
http
  .createServer(async (req, res) => {
    allowCors(res);

    /* -------- pre-flight ---------- */
    if (req.method === 'OPTIONS') {
      res.writeHead(204).end();
      return;
    }

    /* -------- /v1/models ---------- */
    if (req.url === '/v1/models') {
      res.writeHead(200, { 'Content-Type': 'application/json' });
      res.end(
        JSON.stringify({
          data: [
            {
              id: 'gemini-2.5-pro-latest',
              object: 'model',
              owned_by: 'google',
            },
          ],
        }),
      );
      return;
    }
    /* ---- /v1/chat/completions ---- */
    if (req.url === '/v1/chat/completions' && req.method === 'POST') {
      const body = await readJSON(req, res);
      if (!body) return;

      try {
        const { geminiReq, tools } = await mapRequest(body);

        if (body.stream) {
          res.writeHead(200, {
            'Content-Type': 'text/event-stream',
            'Cache-Control': 'no-cache',
            Connection: 'keep-alive',
          });
          for await (const chunk of sendChatStream({ ...geminiReq, tools })) {
            res.write(`data: ${JSON.stringify(mapStreamChunk(chunk))}\n\n`);
          }
          res.end('data: [DONE]\n\n');
        } else {
          const gResp = await sendChat({ ...geminiReq, tools });
          res.writeHead(200, { 'Content-Type': 'application/json' });
          res.end(JSON.stringify(mapResponse(gResp)));
        }
      } catch (err: any) {
        console.error('Proxy error →', err);
        res.writeHead(500, { 'Content-Type': 'application/json' });
        res.end(JSON.stringify({ error: { message: err.message } }));
      }
      return;
    }
    /* ---- anything else ---------- */
    res.writeHead(404).end();
  })
  .listen(PORT, () => console.log(`OpenAI proxy on :${PORT}`));
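
/* ── usage sketch ─────────────────────────────────────────────────────
 * A minimal client example, assuming the proxy is running locally on the
 * default port and that mapRequest/mapResponse follow the standard OpenAI
 * chat-completions request/response shape. The model id is illustrative.
 *
 *   const resp = await fetch('http://localhost:11434/v1/chat/completions', {
 *     method: 'POST',
 *     headers: { 'Content-Type': 'application/json' },
 *     body: JSON.stringify({
 *       model: 'gemini-2.5-pro-latest',
 *       messages: [{ role: 'user', content: 'Hello there' }],
 *     }),
 *   });
 *   const json = await resp.json();
 *   console.log(json.choices?.[0]?.message?.content);
 * ──────────────────────────────────────────────────────────────────── */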