# NOTE(review): the original paste began with "Spaces: / Sleeping / Sleeping" —
# Hugging Face web-page residue from copying the file out of the Spaces UI.
# It is not part of the program and has been commented out.
import os
import time
import json
import gradio as gr
import requests
from datetime import datetime

# =========================
# ⚙️ SETTINGS
# =========================
# Token is read from the environment; the placeholder keeps the app bootable
# (requests will simply be rejected until a real token is provided).
HF_TOKEN = os.getenv("HF_TOKEN") or "hf_your_token_here"
PRIMARY_MODEL = "google/gemma-2-2b-it"
FALLBACK_MODEL = "mistralai/Mixtral-8x7B-Instruct-v0.1"
ROUTER_URL = "https://router.huggingface.co"
HEADERS = {"Authorization": f"Bearer {HF_TOKEN}"}
# =========================
# 🧩 HELPER FUNCTIONS
# =========================
def check_token():
    """Validate the Hugging Face token against the router status endpoint.

    Returns:
        bool: True if the endpoint answers HTTP 200 with the configured
        ``HEADERS``; False on any other status code or network failure.
    """
    # NOTE(review): the exact status endpoint path is assumed to exist on the
    # router host — confirm against current Hugging Face router API docs.
    try:
        res = requests.get(
            "https://router.huggingface.co/status", headers=HEADERS, timeout=8
        )
        if res.status_code == 200:
            return True
        print(f"⚠️ Токен Hugging Face невалиден ({res.status_code})")
        return False
    except requests.RequestException as e:
        # Narrowed from bare Exception: requests.get is the only raiser here,
        # and all its network errors derive from RequestException.
        print(f"Ошибка при проверке токена: {e}")
        return False
def send_request(model: str, prompt: str):
    """Send one generation request to the Router API.

    Args:
        model: Model id to request.
        prompt: Raw prompt text.

    Returns:
        tuple: ``(text, latency_seconds, model, error)`` — ``text`` is None on
        failure, ``error`` is None on success.
    """
    payload = {"model": model, "inputs": prompt, "options": {"use_cache": True}}
    # Start the clock before the try so latency is measurable even when the
    # request raises (the original reported a hard-coded 0 in that case).
    start = time.time()
    try:
        response = requests.post(ROUTER_URL, headers=HEADERS, json=payload, timeout=60)
        latency = time.time() - start
        if response.status_code == 200:
            data = response.json()
            # Normalize the response shapes the API may return:
            # list-of-dicts, single dict, or anything else stringified.
            if isinstance(data, list) and data and "generated_text" in data[0]:
                text = data[0]["generated_text"]
            elif isinstance(data, dict) and "generated_text" in data:
                text = data["generated_text"]
            else:
                text = str(data)
            return text.strip(), latency, model, None
        return None, latency, model, f"Ошибка API {response.status_code}: {response.text}"
    except Exception as e:
        # Broad on purpose: response.json() can raise decode errors in
        # addition to requests' network exceptions.
        return None, time.time() - start, model, str(e)
def generate_text(prompt: str):
    """Generate a reply: try the primary model, fall back on failure.

    Args:
        prompt: User prompt from the UI.

    Returns:
        str: Markdown-formatted answer or error report.
    """
    if not check_token():
        return "❌ Токен Hugging Face недействителен. Проверьте переменную HF_TOKEN."
    # 1. Primary model.
    output, latency, used_model, error = send_request(PRIMARY_MODEL, prompt)
    # send_request returns None on failure; testing "is not None" keeps a
    # legitimately empty generation from silently triggering the fallback.
    if output is not None:
        return render_output(output, used_model, latency, success=True)
    # 2. Fallback model on error.
    output_fb, latency_fb, model_fb, error_fb = send_request(FALLBACK_MODEL, prompt)
    if output_fb is not None:
        return render_output(output_fb, model_fb, latency_fb, success=True, fallback=True)
    return f"❌ Ошибка при выполнении запроса:\n- {error}\n- Fallback: {error_fb}"
def render_output(text, model, latency, success=False, fallback=False):
    """Render a model reply as a Markdown status card.

    Args:
        text: Model reply (leading/trailing whitespace is stripped).
        model: Model id that produced the reply.
        latency: Response time in seconds.
        success: Selects the ✅/⚠️ status icon.
        fallback: Appends a "via fallback" marker to the model name.

    Returns:
        str: Markdown block with model, latency, and the answer.
    """
    status_icon = "✅" if success else "⚠️"
    suffix = " (через fallback)" if fallback else ""
    card = [
        f"{status_icon} **Модель:** `{model}`{suffix}",
        f"⏱ **Время отклика:** {latency:.2f} сек",
        "",
        f"🧠 **Ответ:**\n{text.strip()}",
    ]
    return "\n".join(card)
# =========================
# 🧭 GRADIO UI
# =========================
with gr.Blocks(title="🤖 Eroha AgentAPI v5.7 — Stable Router Edition") as demo:
    gr.Markdown(
        "## 🧠 Eroha AgentAPI v5.7 — Stable Router Edition\n"
        "Поддержка Router API + AutoFallback + Token Validation 🌐"
    )
    with gr.Row():
        prompt = gr.Textbox(
            label="Введите запрос",
            placeholder="Например: 'Расскажи историю про ИИ, который научился понимать чувства.'",
            lines=3,
        )
        output = gr.Markdown(label="Ответ")
    btn = gr.Button("🚀 Отправить", variant="primary")
    # Wire the button to the generator: prompt text in, Markdown card out.
    btn.click(generate_text, inputs=prompt, outputs=output)
    gr.Markdown("---")
    gr.Markdown("🧩 **Eroha Router Core v5.7** | Автоопределение моделей + безопасный fallback")

# Bind to all interfaces on the port Hugging Face Spaces expects.
demo.launch(server_name="0.0.0.0", server_port=7860)