import express from 'express';
import cors from 'cors';
import fetch from 'node-fetch';

const app = express();

// CORS
app.use(cors({
  origin: '*',
  methods: ['GET', 'POST', 'OPTIONS'],
  allowedHeaders: ['Content-Type', 'Authorization']
}));
app.use(express.json());
app.options('*', (req, res) => res.sendStatus(204));
// Model list
app.get(['/models', '/v1/models'], (req, res) => {
  res.json({
    object: 'list',
    data: [
      // OpenAI — GPT-4.1 / 4.5
      { id: 'gpt-4.1', object: 'model', created: 0, owned_by: 'helix' },
      { id: 'gpt-4.1-2025-04-14', object: 'model', created: 0, owned_by: 'helix' },
      { id: 'gpt-4.1-mini', object: 'model', created: 0, owned_by: 'helix' },
      { id: 'gpt-4.1-mini-2025-04-14', object: 'model', created: 0, owned_by: 'helix' },
      { id: 'gpt-4.1-nano', object: 'model', created: 0, owned_by: 'helix' },
      { id: 'gpt-4.1-nano-2025-04-14', object: 'model', created: 0, owned_by: 'helix' },
      { id: 'gpt-4.5-preview', object: 'model', created: 0, owned_by: 'helix' },
      { id: 'gpt-4.5-preview-2025-02-27', object: 'model', created: 0, owned_by: 'helix' },
      // OpenAI — GPT-4o
      { id: 'gpt-4o', object: 'model', created: 0, owned_by: 'helix' },
      { id: 'gpt-4o-2024-05-13', object: 'model', created: 0, owned_by: 'helix' },
      { id: 'gpt-4o-2024-08-06', object: 'model', created: 0, owned_by: 'helix' },
      { id: 'gpt-4o-2024-11-20', object: 'model', created: 0, owned_by: 'helix' },
      { id: 'gpt-4o-mini', object: 'model', created: 0, owned_by: 'helix' },
      { id: 'gpt-4o-mini-2024-07-18', object: 'model', created: 0, owned_by: 'helix' },
      { id: 'gpt-4o-search-preview', object: 'model', created: 0, owned_by: 'helix' },
      { id: 'gpt-4o-search-preview-2025-03-11', object: 'model', created: 0, owned_by: 'helix' },
      { id: 'gpt-4o-mini-search-preview', object: 'model', created: 0, owned_by: 'helix' },
      { id: 'gpt-4o-mini-search-preview-2025-03-11', object: 'model', created: 0, owned_by: 'helix' },
      // Helix — GPT-3.5 Turbo
      { id: 'gpt-3.5-turbo', object: 'model', created: 0, owned_by: 'helix' }
    ]
  });
});
// Chat proxy
app.post(['/chat/completions', '/v1/chat/completions'], async (req, res) => {
  const {
    model,
    messages = [],
    stream = false,
    temperature,
    top_p,
    presence_penalty,
    frequency_penalty,
    ...rest
  } = req.body;

  // Flatten the chat history into a single text prompt with English
  // "User:" / "Assistant:" prefixes (previously "用户:" / "AI:")
  const historyText = messages
    .map(m => (m.role === 'user' ? 'User: ' : 'Assistant: ') + m.content)
    .join('\n');
  const helixPayload = {
    type: 'text',
    stream,
    provider: getProvider(model),
    model,
    messages: [
      {
        role: 'user',
        content: { content_type: 'text', parts: [historyText] }
      }
    ],
    temperature,
    top_p,
    presence_penalty,
    frequency_penalty,
    ...rest
  };
  // Forward the request to Helix
  const helixRes = await fetch('https://app.tryhelix.ai/api/v1/sessions/chat', {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      Authorization: req.header('authorization') || ''
    },
    body: JSON.stringify(helixPayload)
  });

  if (!stream) {
    // Non-streaming: convert the Helix response into an OpenAI-style chat.completion object
    const data = await helixRes.json();
    const reply = data?.choices?.[0]?.message?.content ?? '';
    return res.status(helixRes.status).json({
      id: `chatcmpl-proxy-${data.id ?? Date.now()}`,
      object: 'chat.completion',
      created: Math.floor(Date.now() / 1000),
      model,
      choices: [
        {
          index: 0,
          message: { role: 'assistant', content: reply },
          finish_reason: 'stop'
        }
      ]
    });
  }
  // Streaming: pipe the SSE response from Helix straight through to the client
  res.status(helixRes.status);
  res.set('Content-Type', 'text/event-stream');
  helixRes.body.pipe(res);
});
// Pick the upstream provider from the model id
function getProvider(m) {
  if (/^gpt-[34]/.test(m)) return 'openai'; // covers gpt-3.5, gpt-4.x and gpt-4o
  if (/^(llama|phi|aya|gemma|deepseek|qwen)/.test(m)) return 'helix';
  return 'togetherai';
}
const PORT = process.env.PORT || 7860;
app.listen(PORT, () => {
  console.log(`🚀 Server listening on port ${PORT}`);
});