Spaces:
Running
Running
Commit ·
af9e60e
0
Parent(s):
Initial commit
Browse files- .env.example +185 -0
- CHANGELOG.md +43 -0
- CODE_OF_CONDUCT.md +27 -0
- CONTRIBUTING.md +46 -0
- Dockerfile +53 -0
- LICENSE +21 -0
- README.md +426 -0
- SECURITY.md +28 -0
- dns-fix.js +19 -0
- health-server.js +27 -0
- keep-alive.sh +34 -0
- start.sh +330 -0
- workspace-sync.py +149 -0
.env.example
ADDED
|
@@ -0,0 +1,185 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# ββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ
|
| 2 |
+
# 🦞 HuggingClaw — OpenClaw Gateway for HuggingFace Spaces
|
| 3 |
+
# ββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ
|
| 4 |
+
# Copy this file to .env and fill in your values.
|
| 5 |
+
# For local development: cp .env.example .env && nano .env
|
| 6 |
+
|
| 7 |
+
# ── REQUIRED: Core Configuration ──
|
| 8 |
+
# [REQUIRED] LLM provider API key
|
| 9 |
+
# - Anthropic: sk-ant-v0-...
|
| 10 |
+
# - OpenAI: sk-...
|
| 11 |
+
# - Google: AIzaSy...
|
| 12 |
+
# - OpenRouter: sk-or-v1-... (300+ models via single key)
|
| 13 |
+
LLM_API_KEY=your_api_key_here
|
| 14 |
+
|
| 15 |
+
# [REQUIRED] LLM model to use (format: provider/model-name)
|
| 16 |
+
# Auto-detects provider from prefix — any provider is supported!
|
| 17 |
+
# Provider IDs from OpenClaw docs: docs.openclaw.ai/concepts/model-providers
|
| 18 |
+
#
|
| 19 |
+
# ββ Core Providers ββ
|
| 20 |
+
#
|
| 21 |
+
# Anthropic (ANTHROPIC_API_KEY):
|
| 22 |
+
# - anthropic/claude-opus-4-6
|
| 23 |
+
# - anthropic/claude-sonnet-4-6
|
| 24 |
+
# - anthropic/claude-sonnet-4-5
|
| 25 |
+
# - anthropic/claude-haiku-4-5
|
| 26 |
+
#
|
| 27 |
+
# OpenAI (OPENAI_API_KEY):
|
| 28 |
+
# - openai/gpt-5.4-pro
|
| 29 |
+
# - openai/gpt-5.4
|
| 30 |
+
# - openai/gpt-5.4-mini
|
| 31 |
+
# - openai/gpt-5.4-nano
|
| 32 |
+
# - openai/gpt-4.1
|
| 33 |
+
# - openai/gpt-4.1-mini
|
| 34 |
+
#
|
| 35 |
+
# Google Gemini (GEMINI_API_KEY):
|
| 36 |
+
# - google/gemini-3.1-pro-preview
|
| 37 |
+
# - google/gemini-3-flash-preview
|
| 38 |
+
# - google/gemini-2.5-pro
|
| 39 |
+
# - google/gemini-2.5-flash
|
| 40 |
+
#
|
| 41 |
+
# DeepSeek (DEEPSEEK_API_KEY):
|
| 42 |
+
# - deepseek/deepseek-v3.2
|
| 43 |
+
# - deepseek/deepseek-r1-0528
|
| 44 |
+
# - deepseek/deepseek-r1
|
| 45 |
+
#
|
| 46 |
+
# ββ OpenCode Providers ββ
|
| 47 |
+
#
|
| 48 |
+
# OpenCode Zen β tested & verified models (OPENCODE_API_KEY):
|
| 49 |
+
# - opencode/claude-opus-4-6
|
| 50 |
+
# - opencode/gpt-5.4
|
| 51 |
+
# Get key from: https://opencode.ai/auth
|
| 52 |
+
#
|
| 53 |
+
# OpenCode Go β low-cost open models (OPENCODE_API_KEY):
|
| 54 |
+
# - opencode-go/kimi-k2.5
|
| 55 |
+
#
|
| 56 |
+
# ββ Gateway/Router Providers ββ
|
| 57 |
+
#
|
| 58 |
+
# OpenRouter β 300+ models via single API key (OPENROUTER_API_KEY):
|
| 59 |
+
# - openrouter/anthropic/claude-sonnet-4-6
|
| 60 |
+
# - openrouter/openai/gpt-5.4
|
| 61 |
+
# - openrouter/deepseek/deepseek-v3.2
|
| 62 |
+
# - openrouter/meta-llama/llama-3.3-70b-instruct:free
|
| 63 |
+
# Get key from: https://openrouter.ai
|
| 64 |
+
#
|
| 65 |
+
# Kilo Gateway (KILOCODE_API_KEY):
|
| 66 |
+
# - kilocode/anthropic/claude-opus-4.6
|
| 67 |
+
#
|
| 68 |
+
# ββ Chinese/Asian Providers ββ
|
| 69 |
+
#
|
| 70 |
+
# Z.ai / GLM (ZAI_API_KEY) β OpenClaw normalizes z-ai/z.ai β zai:
|
| 71 |
+
# - zai/glm-5
|
| 72 |
+
# - zai/glm-5-turbo
|
| 73 |
+
# - zai/glm-4.7
|
| 74 |
+
# - zai/glm-4.7-flash
|
| 75 |
+
#
|
| 76 |
+
# Moonshot / Kimi (MOONSHOT_API_KEY):
|
| 77 |
+
# - moonshot/kimi-k2.5
|
| 78 |
+
# - moonshot/kimi-k2-thinking
|
| 79 |
+
#
|
| 80 |
+
# MiniMax (MINIMAX_API_KEY):
|
| 81 |
+
# - minimax/minimax-m2.7
|
| 82 |
+
# - minimax/minimax-m2.5
|
| 83 |
+
#
|
| 84 |
+
# Xiaomi / MiMo (XIAOMI_API_KEY):
|
| 85 |
+
# - xiaomi/mimo-v2-pro
|
| 86 |
+
# - xiaomi/mimo-v2-omni
|
| 87 |
+
#
|
| 88 |
+
# Volcengine / Doubao (VOLCANO_ENGINE_API_KEY):
|
| 89 |
+
# - volcengine/doubao-seed-1-8-251228
|
| 90 |
+
# - volcengine/kimi-k2-5-260127
|
| 91 |
+
#
|
| 92 |
+
# BytePlus β international (BYTEPLUS_API_KEY):
|
| 93 |
+
# - byteplus/seed-1-8-251228
|
| 94 |
+
#
|
| 95 |
+
# ββ Western Providers ββ
|
| 96 |
+
#
|
| 97 |
+
# Mistral (MISTRAL_API_KEY):
|
| 98 |
+
# - mistral/mistral-large-latest
|
| 99 |
+
# - mistral/mistral-small-2603
|
| 100 |
+
# - mistral/devstral-medium
|
| 101 |
+
#
|
| 102 |
+
# xAI / Grok (XAI_API_KEY):
|
| 103 |
+
# - xai/grok-4.20-beta
|
| 104 |
+
# - xai/grok-4
|
| 105 |
+
#
|
| 106 |
+
# NVIDIA (NVIDIA_API_KEY):
|
| 107 |
+
# - nvidia/nemotron-3-super-120b-a12b
|
| 108 |
+
#
|
| 109 |
+
# Groq (GROQ_API_KEY):
|
| 110 |
+
# - groq/mixtral-8x7b-32768
|
| 111 |
+
#
|
| 112 |
+
# Cohere (COHERE_API_KEY):
|
| 113 |
+
# - cohere/command-a
|
| 114 |
+
#
|
| 115 |
+
# Together (TOGETHER_API_KEY):
|
| 116 |
+
# - together/meta-llama/llama-3.3-70b-instruct
|
| 117 |
+
#
|
| 118 |
+
# Cerebras (CEREBRAS_API_KEY):
|
| 119 |
+
# - cerebras/zai-glm-4.7
|
| 120 |
+
#
|
| 121 |
+
# HuggingFace Inference (HUGGINGFACE_HUB_TOKEN):
|
| 122 |
+
# - huggingface/deepseek-ai/DeepSeek-R1
|
| 123 |
+
#
|
| 124 |
+
# Or any other OpenClaw-supported provider (format: provider/model-name)
|
| 125 |
+
LLM_MODEL=anthropic/claude-sonnet-4-5
|
| 126 |
+
|
| 127 |
+
# [REQUIRED] Gateway authentication token
|
| 128 |
+
# Generate: openssl rand -hex 32
|
| 129 |
+
GATEWAY_TOKEN=your_gateway_token_here
|
| 130 |
+
|
| 131 |
+
# (Optional) Password auth β simpler alternative to token for casual users
|
| 132 |
+
# If set, users can log in with this password instead of the token
|
| 133 |
+
# OPENCLAW_PASSWORD=your_password_here
|
| 134 |
+
|
| 135 |
+
# ββ OPTIONAL: Telegram Integration ββ
|
| 136 |
+
# Get bot token from: https://t.me/BotFather
|
| 137 |
+
TELEGRAM_BOT_TOKEN=your_bot_token_here
|
| 138 |
+
|
| 139 |
+
# Single user ID (from https://t.me/userinfobot)
|
| 140 |
+
TELEGRAM_USER_ID=123456789
|
| 141 |
+
|
| 142 |
+
# Multiple user IDs (comma-separated for team access)
|
| 143 |
+
# TELEGRAM_USER_IDS=123456789,987654321,555555555
|
| 144 |
+
|
| 145 |
+
# ββ OPTIONAL: Workspace Backup to HF Dataset ββ
|
| 146 |
+
HF_USERNAME=your_hf_username
|
| 147 |
+
HF_TOKEN=hf_your_token_here
|
| 148 |
+
|
| 149 |
+
# Backup dataset name (auto-created if missing)
|
| 150 |
+
# Default: huggingclaw-backup
|
| 151 |
+
BACKUP_DATASET_NAME=huggingclaw-backup
|
| 152 |
+
|
| 153 |
+
# Git commit identity for workspace syncs
|
| 154 |
+
WORKSPACE_GIT_USER=openclaw@example.com
|
| 155 |
+
WORKSPACE_GIT_NAME=OpenClaw Bot
|
| 156 |
+
|
| 157 |
+
# ββ OPTIONAL: Background Services ββ
|
| 158 |
+
# Keep-alive ping interval (seconds). Default: 300. Set 0 to disable.
|
| 159 |
+
KEEP_ALIVE_INTERVAL=300
|
| 160 |
+
|
| 161 |
+
# Workspace auto-sync interval (seconds). Default: 600.
|
| 162 |
+
SYNC_INTERVAL=600
|
| 163 |
+
|
| 164 |
+
# ββ OPTIONAL: Advanced ββ
|
| 165 |
+
# Pin OpenClaw version. Default: latest
|
| 166 |
+
OPENCLAW_VERSION=latest
|
| 167 |
+
|
| 168 |
+
# Health endpoint port. Default: 7861
|
| 169 |
+
HEALTH_PORT=7861
|
| 170 |
+
|
| 171 |
+
# Trusted proxies (comma-separated IPs)
|
| 172 |
+
# Fixes "Proxy headers detected from untrusted address" behind reverse proxies
|
| 173 |
+
# Only set if you see pairing/auth errors. Find IPs in Space logs (remote=x.x.x.x)
|
| 174 |
+
# TRUSTED_PROXIES=10.20.31.87,10.20.26.157
|
| 175 |
+
|
| 176 |
+
# Allowed origins for Control UI (comma-separated URLs)
|
| 177 |
+
# Locks down the web UI to only these origins
|
| 178 |
+
# ALLOWED_ORIGINS=https://your-space.hf.space
|
| 179 |
+
|
| 180 |
+
# ββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ
|
| 181 |
+
# QUICK START: Only 3 secrets required!
|
| 182 |
+
# 1. LLM_API_KEY β From your LLM provider
|
| 183 |
+
# 2. LLM_MODEL β Pick a model above
|
| 184 |
+
# 3. GATEWAY_TOKEN β Run: openssl rand -hex 32
|
| 185 |
+
# ββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ
|
CHANGELOG.md
ADDED
|
@@ -0,0 +1,43 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Changelog
|
| 2 |
+
|
| 3 |
+
All notable changes to this project will be documented in this file.
|
| 4 |
+
|
| 5 |
+
## [1.1.0] - 2026-03-31
|
| 6 |
+
|
| 7 |
+
### Added
|
| 8 |
+
- **Pre-built Docker image** β uses `ghcr.io/openclaw/openclaw:latest` multi-stage build for much faster builds (minutes instead of 30+)
|
| 9 |
+
- **Python huggingface_hub sync** β `workspace-sync.py` uses the `huggingface_hub` library for more reliable HF Dataset sync (handles auth, LFS, retries). Falls back to git-based sync automatically
|
| 10 |
+
- **Password auth** β `OPENCLAW_PASSWORD` for simpler login (optional alternative to token)
|
| 11 |
+
- **Trusted proxies** β `TRUSTED_PROXIES` env var fixes "Proxy headers detected from untrusted address" errors on HF Spaces
|
| 12 |
+
- **Allowed origins** β `ALLOWED_ORIGINS` env var to lock down Control UI access
|
| 13 |
+
- **40+ LLM providers** β Added support for OpenCode, OpenRouter, DeepSeek, Qwen, Z.ai, Moonshot, Mistral, xAI, NVIDIA, Volcengine, BytePlus, Cohere, Groq, HuggingFace Inference, and more
|
| 14 |
+
- **OpenCode Zen/Go** β support for OpenCode's tested model service
|
| 15 |
+
|
| 16 |
+
### Changed
|
| 17 |
+
- Provider detection now uses `case` statement (cleaner, faster) with correct OpenClaw provider IDs
|
| 18 |
+
- Model IDs now sourced from OpenClaw docs (not OpenRouter) for accuracy
|
| 19 |
+
- Google API key env var corrected to `GEMINI_API_KEY`
|
| 20 |
+
|
| 21 |
+
## [1.0.0] - 2026-03-30
|
| 22 |
+
|
| 23 |
+
### 🎉 Initial Release
|
| 24 |
+
|
| 25 |
+
#### Features
|
| 26 |
+
- **Any LLM provider** — Anthropic (Claude), OpenAI (GPT-4), Google (Gemini)
|
| 27 |
+
- **Telegram integration** β connect via @BotFather, supports multiple users
|
| 28 |
+
- **Built-in keep-alive** β self-pings to prevent HF Spaces 48h sleep
|
| 29 |
+
- **Auto-sync workspace** β commits + pushes to HF Dataset every 10 min
|
| 30 |
+
- **Auto-create backup** β creates HF Dataset automatically on first run
|
| 31 |
+
- **Graceful shutdown** β saves workspace before container stops
|
| 32 |
+
- **Health endpoint** β `/health` on port 7861 for monitoring
|
| 33 |
+
- **DNS fix** β bypasses HF Spaces internal DNS restrictions
|
| 34 |
+
- **Version pinning** β lock OpenClaw to a specific version
|
| 35 |
+
- **Startup banner** β clean summary of all running services
|
| 36 |
+
- **Zero-config defaults** β just 2 secrets to get started
|
| 37 |
+
|
| 38 |
+
#### Architecture
|
| 39 |
+
- `start.sh` β config generator + validation + orchestrator
|
| 40 |
+
- `keep-alive.sh` β self-ping background service
|
| 41 |
+
- `workspace-sync.sh` β periodic workspace backup
|
| 42 |
+
- `health-server.js` β lightweight health endpoint
|
| 43 |
+
- `dns-fix.js` β DNS override for HF network restrictions
|
CODE_OF_CONDUCT.md
ADDED
|
@@ -0,0 +1,27 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Code of Conduct
|
| 2 |
+
|
| 3 |
+
## Our Pledge
|
| 4 |
+
|
| 5 |
+
We are committed to making participation in this project a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, sex characteristics, gender identity and expression, level of experience, education, socio-economic status, nationality, personal appearance, race, religion, or sexual identity and orientation.
|
| 6 |
+
|
| 7 |
+
## Our Standards
|
| 8 |
+
|
| 9 |
+
**Positive behavior includes:**
|
| 10 |
+
- Using welcoming and inclusive language
|
| 11 |
+
- Being respectful of differing viewpoints
|
| 12 |
+
- Gracefully accepting constructive criticism
|
| 13 |
+
- Focusing on what is best for the community
|
| 14 |
+
|
| 15 |
+
**Unacceptable behavior includes:**
|
| 16 |
+
- Trolling, insulting, or derogatory comments
|
| 17 |
+
- Public or private harassment
|
| 18 |
+
- Publishing others' private information without permission
|
| 19 |
+
- Other conduct which could reasonably be considered inappropriate
|
| 20 |
+
|
| 21 |
+
## Enforcement
|
| 22 |
+
|
| 23 |
+
Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by opening an issue or contacting the maintainer. All complaints will be reviewed and investigated.
|
| 24 |
+
|
| 25 |
+
## Attribution
|
| 26 |
+
|
| 27 |
+
This Code of Conduct is adapted from the [Contributor Covenant](https://www.contributor-covenant.org/), version 2.0.
|
CONTRIBUTING.md
ADDED
|
@@ -0,0 +1,46 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Contributing to HuggingClaw
|
| 2 |
+
|
| 3 |
+
Thanks for your interest in contributing! 🦞
|
| 4 |
+
|
| 5 |
+
## How to Contribute
|
| 6 |
+
|
| 7 |
+
### Bug Reports
|
| 8 |
+
- Open an issue with a clear description
|
| 9 |
+
- Include your HF Space logs if possible
|
| 10 |
+
- Mention which LLM provider you're using
|
| 11 |
+
|
| 12 |
+
### Feature Requests
|
| 13 |
+
- Open an issue with the `enhancement` label
|
| 14 |
+
- Describe the use case — why is this needed?
|
| 15 |
+
|
| 16 |
+
### Pull Requests
|
| 17 |
+
1. Fork the repo
|
| 18 |
+
2. Create a feature branch: `git checkout -b feature/my-feature`
|
| 19 |
+
3. Make your changes
|
| 20 |
+
4. Test locally with Docker: `docker build -t huggingclaw . && docker run -p 7860:7860 --env-file .env huggingclaw`
|
| 21 |
+
5. Commit with a clear message
|
| 22 |
+
6. Push and open a PR
|
| 23 |
+
|
| 24 |
+
### Code Style
|
| 25 |
+
- Shell scripts: use `set -e`, quote variables, comment non-obvious logic
|
| 26 |
+
- Keep it simple β this project should stay easy to understand
|
| 27 |
+
- No unnecessary dependencies
|
| 28 |
+
|
| 29 |
+
### Testing
|
| 30 |
+
- Test with at least one LLM provider (Anthropic, OpenAI, or Google)
|
| 31 |
+
- Test with and without Telegram enabled
|
| 32 |
+
- Test with and without workspace backup enabled
|
| 33 |
+
- Verify keep-alive and auto-sync work
|
| 34 |
+
|
| 35 |
+
## Development Setup
|
| 36 |
+
|
| 37 |
+
```bash
|
| 38 |
+
cp .env.example .env
|
| 39 |
+
# Fill in your values
|
| 40 |
+
docker build -t huggingclaw .
|
| 41 |
+
docker run -p 7860:7860 --env-file .env huggingclaw
|
| 42 |
+
```
|
| 43 |
+
|
| 44 |
+
## Questions?
|
| 45 |
+
|
| 46 |
+
Open an issue or start a discussion. We're friendly! 🤖
|
Dockerfile
ADDED
|
@@ -0,0 +1,53 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# ββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ
|
| 2 |
+
# π¦ HuggingClaw β OpenClaw Gateway for HuggingFace Spaces
|
| 3 |
+
# ββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ
|
| 4 |
+
# Multi-stage build: uses pre-built OpenClaw image for fast builds
|
| 5 |
+
|
| 6 |
+
# ββ Stage 1: Pull pre-built OpenClaw ββ
|
| 7 |
+
FROM ghcr.io/openclaw/openclaw:latest AS openclaw
|
| 8 |
+
|
| 9 |
+
# ββ Stage 2: Runtime ββ
|
| 10 |
+
FROM node:22-slim
|
| 11 |
+
|
| 12 |
+
# Install system dependencies
|
| 13 |
+
RUN apt-get update && apt-get install -y \
|
| 14 |
+
git \
|
| 15 |
+
ca-certificates \
|
| 16 |
+
jq \
|
| 17 |
+
curl \
|
| 18 |
+
python3 \
|
| 19 |
+
python3-pip \
|
| 20 |
+
--no-install-recommends && \
|
| 21 |
+
pip3 install --no-cache-dir --break-system-packages huggingface_hub && \
|
| 22 |
+
rm -rf /var/lib/apt/lists/*
|
| 23 |
+
|
| 24 |
+
# Reuse existing node user (UID 1000)
|
| 25 |
+
RUN mkdir -p /home/node/app /home/node/.openclaw && \
|
| 26 |
+
chown -R 1000:1000 /home/node
|
| 27 |
+
|
| 28 |
+
# Copy pre-built OpenClaw (skips npm install entirely β much faster!)
|
| 29 |
+
COPY --from=openclaw --chown=1000:1000 /app /home/node/.openclaw/openclaw-app
|
| 30 |
+
|
| 31 |
+
# Symlink openclaw CLI so it's available globally
|
| 32 |
+
RUN ln -s /home/node/.openclaw/openclaw-app/openclaw.mjs /usr/local/bin/openclaw 2>/dev/null || \
|
| 33 |
+
npm install -g openclaw@latest
|
| 34 |
+
|
| 35 |
+
# Copy HuggingClaw files
|
| 36 |
+
COPY --chown=1000:1000 dns-fix.js /opt/dns-fix.js
|
| 37 |
+
COPY --chown=1000:1000 health-server.js /home/node/app/health-server.js
|
| 38 |
+
COPY --chown=1000:1000 start.sh /home/node/app/start.sh
|
| 39 |
+
COPY --chown=1000:1000 keep-alive.sh /home/node/app/keep-alive.sh
|
| 40 |
+
COPY --chown=1000:1000 workspace-sync.py /home/node/app/workspace-sync.py
|
| 41 |
+
RUN chmod +x /home/node/app/start.sh /home/node/app/keep-alive.sh
|
| 42 |
+
|
| 43 |
+
USER node
|
| 44 |
+
|
| 45 |
+
ENV HOME=/home/node \
|
| 46 |
+
PATH=/home/node/.local/bin:/usr/local/bin:$PATH \
|
| 47 |
+
NODE_OPTIONS="--require /opt/dns-fix.js"
|
| 48 |
+
|
| 49 |
+
WORKDIR /home/node/app
|
| 50 |
+
|
| 51 |
+
EXPOSE 7860
|
| 52 |
+
|
| 53 |
+
CMD ["/home/node/app/start.sh"]
|
LICENSE
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
MIT License
|
| 2 |
+
|
| 3 |
+
Copyright (c) 2026 Somrat Sorkar (@somratpro)
|
| 4 |
+
|
| 5 |
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
| 6 |
+
of this software and associated documentation files (the "Software"), to deal
|
| 7 |
+
in the Software without restriction, including without limitation the rights
|
| 8 |
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
| 9 |
+
copies of the Software, and to permit persons to whom the Software is
|
| 10 |
+
furnished to do so, subject to the following conditions:
|
| 11 |
+
|
| 12 |
+
The above copyright notice and this permission notice shall be included in all
|
| 13 |
+
copies or substantial portions of the Software.
|
| 14 |
+
|
| 15 |
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
| 16 |
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
| 17 |
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
| 18 |
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
| 19 |
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
| 20 |
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
| 21 |
+
SOFTWARE.
|
README.md
ADDED
|
@@ -0,0 +1,426 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
---
|
| 2 |
+
title: HuggingClaw
|
| 3 |
+
emoji: 🦞
|
| 4 |
+
colorFrom: blue
|
| 5 |
+
colorTo: purple
|
| 6 |
+
sdk: docker
|
| 7 |
+
app_port: 7860
|
| 8 |
+
pinned: true
|
| 9 |
+
---
|
| 10 |
+
|
| 11 |
+
<!-- Badges -->
|
| 12 |
+
[](https://github.com/somratpro/huggingclaw)
|
| 13 |
+
[](https://opensource.org/licenses/MIT)
|
| 14 |
+
[](https://huggingface.co/spaces)
|
| 15 |
+
[](https://github.com/openclaw/openclaw)
|
| 16 |
+
|
| 17 |
+
# π¦ HuggingClaw
|
| 18 |
+
|
| 19 |
+
Run your own **always-on AI assistant** on HuggingFace Spaces — for free.
|
| 20 |
+
|
| 21 |
+
Works with **any LLM** (Anthropic, OpenAI, Google), connects via **Telegram**, and persists your workspace to **HF Datasets** automatically.
|
| 22 |
+
|
| 23 |
+
### ✨ Features
|
| 24 |
+
|
| 25 |
+
- **Zero-config** — just add 3 secrets and deploy
|
| 26 |
+
- **Any LLM provider** β Claude, GPT-4, Gemini, DeepSeek, Qwen, Grok, and [40+ more](#-llm-provider-setup)
|
| 27 |
+
- **Fast builds** β uses pre-built OpenClaw Docker image (minutes, not 30+)
|
| 28 |
+
- **Smart workspace sync** β uses `huggingface_hub` Python library (more reliable than git for HF)
|
| 29 |
+
- **Built-in keep-alive** β self-pings to prevent HF sleep (no external cron needed)
|
| 30 |
+
- **Auto-create backup** β creates the HF Dataset for you if it doesn't exist
|
| 31 |
+
- **Graceful shutdown** β saves workspace before container dies
|
| 32 |
+
- **Multi-user Telegram** β supports comma-separated user IDs for teams
|
| 33 |
+
- **Health endpoint** β `/health` for monitoring
|
| 34 |
+
- **Password or token auth** β choose what works for you
|
| 35 |
+
- **100% HF-native** β runs entirely on HuggingFace infrastructure
|
| 36 |
+
|
| 37 |
+
---
|
| 38 |
+
|
| 39 |
+
## π Quick Start
|
| 40 |
+
|
| 41 |
+
### 1. Duplicate this Space
|
| 42 |
+
[](https://huggingface.co/spaces/somratpro/HuggingClaw?duplicate=true)
|
| 43 |
+
|
| 44 |
+
Click the button above → name it → set to **Private**
|
| 45 |
+
|
| 46 |
+
### 2. Add Required Secrets
|
| 47 |
+
Go to **Settings → Secrets**:
|
| 48 |
+
|
| 49 |
+
| Secret | Value |
|
| 50 |
+
|--------|-------|
|
| 51 |
+
| `LLM_API_KEY` | Your API key ([Anthropic](https://console.anthropic.com/) / [OpenAI](https://platform.openai.com/) / [Google](https://ai.google.dev/)) |
|
| 52 |
+
| `LLM_MODEL` | Model to use (e.g. `google/gemini-2.5-flash`, `anthropic/claude-sonnet-4-5`, `openai/gpt-4`) |
|
| 53 |
+
| `GATEWAY_TOKEN` | Run `openssl rand -hex 32` to generate |
|
| 54 |
+
|
| 55 |
+
### 3. Deploy
|
| 56 |
+
That's it! The Space builds and starts automatically.
|
| 57 |
+
|
| 58 |
+
### 4. (Optional) Add Telegram
|
| 59 |
+
| Secret | Value |
|
| 60 |
+
|--------|-------|
|
| 61 |
+
| `TELEGRAM_BOT_TOKEN` | From [@BotFather](https://t.me/BotFather) |
|
| 62 |
+
| `TELEGRAM_USER_ID` | Your user ID ([how to find](https://t.me/userinfobot)) |
|
| 63 |
+
|
| 64 |
+
### 5. (Optional) Enable Workspace Backup
|
| 65 |
+
| Secret | Value |
|
| 66 |
+
|--------|-------|
|
| 67 |
+
| `HF_USERNAME` | Your HuggingFace username |
|
| 68 |
+
| `HF_TOKEN` | [HF token](https://huggingface.co/settings/tokens) with write access |
|
| 69 |
+
|
| 70 |
+
The backup dataset (`huggingclaw-backup`) is **created automatically** β no manual setup needed.
|
| 71 |
+
|
| 72 |
+
---
|
| 73 |
+
|
| 74 |
+
## π All Configuration Options
|
| 75 |
+
|
| 76 |
+
See **`.env.example`** for the complete reference with examples.
|
| 77 |
+
|
| 78 |
+
#### Required
|
| 79 |
+
|
| 80 |
+
| Variable | Purpose |
|
| 81 |
+
|----------|---------|
|
| 82 |
+
| `LLM_API_KEY` | LLM provider API key |
|
| 83 |
+
| `LLM_MODEL` | Model to use (e.g. `google/gemini-2.5-flash`, `anthropic/claude-sonnet-4-5`, `openai/gpt-4`) β auto-detects provider from prefix |
|
| 84 |
+
| `GATEWAY_TOKEN` | Gateway auth token |
|
| 85 |
+
|
| 86 |
+
#### Telegram
|
| 87 |
+
|
| 88 |
+
| Variable | Purpose |
|
| 89 |
+
|----------|---------|
|
| 90 |
+
| `TELEGRAM_BOT_TOKEN` | Bot token from @BotFather |
|
| 91 |
+
| `TELEGRAM_USER_ID` | Single user allowlist |
|
| 92 |
+
| `TELEGRAM_USER_IDS` | Multiple users (comma-separated): `123,456,789` |
|
| 93 |
+
|
| 94 |
+
#### Workspace Backup
|
| 95 |
+
|
| 96 |
+
| Variable | Default | Purpose |
|
| 97 |
+
|----------|---------|---------|
|
| 98 |
+
| `HF_USERNAME` | β | Your HF username |
|
| 99 |
+
| `HF_TOKEN` | β | HF token (write access) |
|
| 100 |
+
| `BACKUP_DATASET_NAME` | `huggingclaw-backup` | Dataset name (auto-created!) |
|
| 101 |
+
| `WORKSPACE_GIT_USER` | `openclaw@example.com` | Git commit email |
|
| 102 |
+
| `WORKSPACE_GIT_NAME` | `OpenClaw Bot` | Git commit name |
|
| 103 |
+
|
| 104 |
+
#### Background Services
|
| 105 |
+
|
| 106 |
+
| Variable | Default | Purpose |
|
| 107 |
+
|----------|---------|---------|
|
| 108 |
+
| `KEEP_ALIVE_INTERVAL` | `300` (5 min) | Self-ping interval. `0` = disable |
|
| 109 |
+
| `SYNC_INTERVAL` | `600` (10 min) | Auto-sync interval |
|
| 110 |
+
|
| 111 |
+
#### Security (Optional)
|
| 112 |
+
|
| 113 |
+
| Variable | Default | Purpose |
|
| 114 |
+
|----------|---------|---------|
|
| 115 |
+
| `OPENCLAW_PASSWORD` | β | Password auth (simpler alternative to token) |
|
| 116 |
+
| `TRUSTED_PROXIES` | β | Comma-separated proxy IPs (fixes auth issues behind reverse proxies) |
|
| 117 |
+
| `ALLOWED_ORIGINS` | β | Comma-separated URLs to lock down Control UI |
|
| 118 |
+
|
| 119 |
+
#### Advanced
|
| 120 |
+
|
| 121 |
+
| Variable | Default | Purpose |
|
| 122 |
+
|----------|---------|---------|
|
| 123 |
+
| `OPENCLAW_VERSION` | `latest` | Pin OpenClaw version |
|
| 124 |
+
| `HEALTH_PORT` | `7861` | Health endpoint port |
|
| 125 |
+
|
| 126 |
+
---
|
| 127 |
+
|
| 128 |
+
## 🤖 LLM Provider Setup
|
| 129 |
+
|
| 130 |
+
Just set `LLM_MODEL` with the correct provider prefix — **any provider is supported**! The provider is auto-detected from the model name. All provider IDs from [OpenClaw docs](https://docs.openclaw.ai/concepts/model-providers).
|
| 131 |
+
|
| 132 |
+
### Anthropic (Claude)
|
| 133 |
+
```
|
| 134 |
+
LLM_API_KEY=sk-ant-v0-...
|
| 135 |
+
LLM_MODEL=anthropic/claude-sonnet-4-5
|
| 136 |
+
```
|
| 137 |
+
Models: `anthropic/claude-opus-4-6` Β· `anthropic/claude-sonnet-4-6` Β· `anthropic/claude-sonnet-4-5` Β· `anthropic/claude-haiku-4-5`
|
| 138 |
+
|
| 139 |
+
### OpenAI
|
| 140 |
+
```
|
| 141 |
+
LLM_API_KEY=sk-...
|
| 142 |
+
LLM_MODEL=openai/gpt-5.4
|
| 143 |
+
```
|
| 144 |
+
Models: `openai/gpt-5.4-pro` Β· `openai/gpt-5.4` Β· `openai/gpt-5.4-mini` Β· `openai/gpt-5.4-nano` Β· `openai/gpt-4.1` Β· `openai/gpt-4.1-mini`
|
| 145 |
+
|
| 146 |
+
### Google (Gemini)
|
| 147 |
+
```
|
| 148 |
+
LLM_API_KEY=AIzaSy...
|
| 149 |
+
LLM_MODEL=google/gemini-2.5-flash
|
| 150 |
+
```
|
| 151 |
+
Models: `google/gemini-3.1-pro-preview` Β· `google/gemini-3-flash-preview` Β· `google/gemini-2.5-pro` Β· `google/gemini-2.5-flash`
|
| 152 |
+
|
| 153 |
+
### DeepSeek
|
| 154 |
+
```
|
| 155 |
+
LLM_API_KEY=sk-...
|
| 156 |
+
LLM_MODEL=deepseek/deepseek-v3.2
|
| 157 |
+
```
|
| 158 |
+
Models: `deepseek/deepseek-v3.2` Β· `deepseek/deepseek-r1-0528` Β· `deepseek/deepseek-r1`
|
| 159 |
+
Get key from: [DeepSeek Platform](https://platform.deepseek.com)
|
| 160 |
+
|
| 161 |
+
### OpenCode Zen (tested & verified models)
|
| 162 |
+
```
|
| 163 |
+
LLM_API_KEY=your_opencode_api_key
|
| 164 |
+
LLM_MODEL=opencode/claude-opus-4-6
|
| 165 |
+
```
|
| 166 |
+
Models: `opencode/claude-opus-4-6` Β· `opencode/gpt-5.4`
|
| 167 |
+
Get key from: [OpenCode.ai](https://opencode.ai/auth)
|
| 168 |
+
|
| 169 |
+
### OpenCode Go (low-cost open models)
|
| 170 |
+
```
|
| 171 |
+
LLM_API_KEY=your_opencode_api_key
|
| 172 |
+
LLM_MODEL=opencode-go/kimi-k2.5
|
| 173 |
+
```
|
| 174 |
+
Get key from: [OpenCode.ai](https://opencode.ai/auth)
|
| 175 |
+
|
| 176 |
+
### Z.ai (GLM)
|
| 177 |
+
```
|
| 178 |
+
LLM_API_KEY=your_zai_api_key
|
| 179 |
+
LLM_MODEL=zai/glm-5
|
| 180 |
+
```
|
| 181 |
+
Models: `zai/glm-5` Β· `zai/glm-5-turbo` Β· `zai/glm-4.7` Β· `zai/glm-4.7-flash`
|
| 182 |
+
Get key from: [Z.ai](https://z.ai) Β· Note: `z-ai/` and `z.ai/` prefixes auto-normalize to `zai/`
|
| 183 |
+
|
| 184 |
+
### Moonshot (Kimi)
|
| 185 |
+
```
|
| 186 |
+
LLM_API_KEY=sk-...
|
| 187 |
+
LLM_MODEL=moonshot/kimi-k2.5
|
| 188 |
+
```
|
| 189 |
+
Models: `moonshot/kimi-k2.5` Β· `moonshot/kimi-k2-thinking`
|
| 190 |
+
Get key from: [Moonshot API](https://platform.moonshot.cn)
|
| 191 |
+
|
| 192 |
+
### Mistral
|
| 193 |
+
```
|
| 194 |
+
LLM_API_KEY=your_mistral_api_key
|
| 195 |
+
LLM_MODEL=mistral/mistral-large-latest
|
| 196 |
+
```
|
| 197 |
+
Models: `mistral/mistral-large-latest` Β· `mistral/mistral-small-2603` Β· `mistral/devstral-medium` Β· `mistral/codestral-2508`
|
| 198 |
+
Get key from: [Mistral Console](https://console.mistral.ai)
|
| 199 |
+
|
| 200 |
+
### xAI (Grok)
|
| 201 |
+
```
|
| 202 |
+
LLM_API_KEY=your_xai_api_key
|
| 203 |
+
LLM_MODEL=xai/grok-4.20-beta
|
| 204 |
+
```
|
| 205 |
+
Models: `xai/grok-4.20-beta` Β· `xai/grok-4` Β· `xai/grok-4.1-fast`
|
| 206 |
+
Get key from: [xAI Console](https://console.x.ai)
|
| 207 |
+
|
| 208 |
+
### MiniMax
|
| 209 |
+
```
|
| 210 |
+
LLM_API_KEY=your_minimax_api_key
|
| 211 |
+
LLM_MODEL=minimax/minimax-m2.7
|
| 212 |
+
```
|
| 213 |
+
Models: `minimax/minimax-m2.7` Β· `minimax/minimax-m2.5`
|
| 214 |
+
Get key from: [MiniMax Platform](https://platform.minimax.io)
|
| 215 |
+
|
| 216 |
+
### NVIDIA
|
| 217 |
+
```
|
| 218 |
+
LLM_API_KEY=your_nvidia_api_key
|
| 219 |
+
LLM_MODEL=nvidia/nemotron-3-super-120b-a12b
|
| 220 |
+
```
|
| 221 |
+
Get key from: [NVIDIA API](https://api.nvidia.com)
|
| 222 |
+
|
| 223 |
+
### Xiaomi (MiMo)
|
| 224 |
+
```
|
| 225 |
+
LLM_API_KEY=your_xiaomi_api_key
|
| 226 |
+
LLM_MODEL=xiaomi/mimo-v2-pro
|
| 227 |
+
```
|
| 228 |
+
Models: `xiaomi/mimo-v2-pro` Β· `xiaomi/mimo-v2-omni`
|
| 229 |
+
|
| 230 |
+
### Volcengine (Doubao / ByteDance)
|
| 231 |
+
```
|
| 232 |
+
LLM_API_KEY=your_volcengine_api_key
|
| 233 |
+
LLM_MODEL=volcengine/doubao-seed-1-8-251228
|
| 234 |
+
```
|
| 235 |
+
Models: `volcengine/doubao-seed-1-8-251228` Β· `volcengine/kimi-k2-5-260127` Β· `volcengine/glm-4-7-251222`
|
| 236 |
+
Get key from: [Volcengine](https://www.volcengine.com)
|
| 237 |
+
|
| 238 |
+
### Groq
|
| 239 |
+
```
|
| 240 |
+
LLM_API_KEY=your_groq_api_key
|
| 241 |
+
LLM_MODEL=groq/mixtral-8x7b-32768
|
| 242 |
+
```
|
| 243 |
+
Get key from: [Groq Console](https://console.groq.com)
|
| 244 |
+
|
| 245 |
+
### Cohere
|
| 246 |
+
```
|
| 247 |
+
LLM_API_KEY=your_cohere_api_key
|
| 248 |
+
LLM_MODEL=cohere/command-a
|
| 249 |
+
```
|
| 250 |
+
Get key from: [Cohere Dashboard](https://dashboard.cohere.com)
|
| 251 |
+
|
| 252 |
+
### HuggingFace Inference
|
| 253 |
+
```
|
| 254 |
+
LLM_API_KEY=hf_your_token
|
| 255 |
+
LLM_MODEL=huggingface/deepseek-ai/DeepSeek-R1
|
| 256 |
+
```
|
| 257 |
+
Get key from: [HuggingFace Tokens](https://huggingface.co/settings/tokens)
|
| 258 |
+
|
| 259 |
+
### OpenRouter (300+ models via single API key)
|
| 260 |
+
```
|
| 261 |
+
LLM_API_KEY=sk-or-v1-...
|
| 262 |
+
LLM_MODEL=openrouter/anthropic/claude-sonnet-4-6
|
| 263 |
+
```
|
| 264 |
+
With OpenRouter, you can access **every model above** with a single API key! Just prefix with `openrouter/`:
|
| 265 |
+
- `openrouter/anthropic/claude-sonnet-4-6` β Anthropic Claude
|
| 266 |
+
- `openrouter/openai/gpt-5.4` β OpenAI
|
| 267 |
+
- `openrouter/deepseek/deepseek-v3.2` β DeepSeek
|
| 268 |
+
- `openrouter/google/gemini-2.5-flash` β Google Gemini
|
| 269 |
+
- `openrouter/meta-llama/llama-3.3-70b-instruct:free` β Llama (free!)
|
| 270 |
+
- `openrouter/moonshotai/kimi-k2.5` β Moonshot Kimi
|
| 271 |
+
- `openrouter/z-ai/glm-5-turbo` β Z.ai GLM
|
| 272 |
+
|
| 273 |
+
Get key from: [OpenRouter.ai](https://openrouter.ai) Β· [Full model list](https://openrouter.ai/models)
|
| 274 |
+
|
| 275 |
+
### Kilo Gateway
|
| 276 |
+
```
|
| 277 |
+
LLM_API_KEY=your_kilocode_api_key
|
| 278 |
+
LLM_MODEL=kilocode/anthropic/claude-opus-4.6
|
| 279 |
+
```
|
| 280 |
+
Get key from: [Kilo.ai](https://kilo.ai)
|
| 281 |
+
|
| 282 |
+
### Any Other Provider
|
| 283 |
+
HuggingClaw supports **any LLM provider** that OpenClaw supports. Just use:
|
| 284 |
+
```
|
| 285 |
+
LLM_API_KEY=your_api_key
|
| 286 |
+
LLM_MODEL=provider/model-name
|
| 287 |
+
```
|
| 288 |
+
The provider prefix is auto-detected and mapped to the appropriate environment variable.
|
| 289 |
+
|
| 290 |
+
Full provider list: [OpenClaw Model Providers](https://docs.openclaw.ai/concepts/model-providers) Β· [OpenCode Providers](https://opencode.ai/docs/providers)
|
| 291 |
+
|
| 292 |
+
---
|
| 293 |
+
|
| 294 |
+
## π± Telegram Setup
|
| 295 |
+
|
| 296 |
+
1. Message [@BotFather](https://t.me/BotFather) β `/newbot` β copy the token
|
| 297 |
+
2. Message [@userinfobot](https://t.me/userinfobot) to get your user ID
|
| 298 |
+
3. Add secrets: `TELEGRAM_BOT_TOKEN` and `TELEGRAM_USER_ID`
|
| 299 |
+
4. Restart the Space β DM your bot π
|
| 300 |
+
|
| 301 |
+
**Multiple users?** Use `TELEGRAM_USER_IDS=123,456,789` (comma-separated)
|
| 302 |
+
|
| 303 |
+
---
|
| 304 |
+
|
| 305 |
+
## πΎ Workspace Backup
|
| 306 |
+
|
| 307 |
+
Set `HF_USERNAME` + `HF_TOKEN` and HuggingClaw handles everything:
|
| 308 |
+
|
| 309 |
+
1. **Auto-creates** the dataset if it doesn't exist
|
| 310 |
+
2. **Restores** workspace on every startup
|
| 311 |
+
3. **Smart sync** β uses `huggingface_hub` Python library (handles auth, LFS, retries automatically; falls back to git if unavailable)
|
| 312 |
+
4. **Auto-syncs** changes every 10 minutes (configurable via `SYNC_INTERVAL`)
|
| 313 |
+
5. **Saves** on shutdown (graceful SIGTERM handling)
|
| 314 |
+
|
| 315 |
+
Custom dataset name: `BACKUP_DATASET_NAME=my-custom-backup`
|
| 316 |
+
|
| 317 |
+
---
|
| 318 |
+
|
| 319 |
+
## π How It Stays Alive
|
| 320 |
+
|
| 321 |
+
HF Spaces sleeps after 48h of no HTTP requests. HuggingClaw prevents this with:
|
| 322 |
+
|
| 323 |
+
- **Self-ping** β pings its own URL every 5 min (uses HF's `SPACE_HOST` env var)
|
| 324 |
+
- **Health endpoint** β returns `200 OK` with uptime info
|
| 325 |
+
- **Zero dependencies** β no external cron, no third-party pinger
|
| 326 |
+
|
| 327 |
+
Your Space runs forever, powered entirely by HF. π―
|
| 328 |
+
|
| 329 |
+
---
|
| 330 |
+
|
| 331 |
+
## π» Local Development
|
| 332 |
+
|
| 333 |
+
```bash
|
| 334 |
+
git clone https://github.com/somratpro/huggingclaw.git
|
| 335 |
+
cd huggingclaw
|
| 336 |
+
cp .env.example .env
|
| 337 |
+
nano .env # fill in your values
|
| 338 |
+
```
|
| 339 |
+
|
| 340 |
+
**Docker:**
|
| 341 |
+
```bash
|
| 342 |
+
docker build -t huggingclaw .
|
| 343 |
+
docker run -p 7860:7860 --env-file .env huggingclaw
|
| 344 |
+
```
|
| 345 |
+
|
| 346 |
+
**Without Docker:**
|
| 347 |
+
```bash
|
| 348 |
+
npm install -g openclaw@latest
|
| 349 |
+
export $(cat .env | xargs)
|
| 350 |
+
bash start.sh
|
| 351 |
+
```
|
| 352 |
+
|
| 353 |
+
---
|
| 354 |
+
|
| 355 |
+
## π Connect via CLI
|
| 356 |
+
|
| 357 |
+
```bash
|
| 358 |
+
npm install -g openclaw@latest
|
| 359 |
+
openclaw channels login --gateway https://YOUR-SPACE-URL.hf.space
|
| 360 |
+
# Enter your GATEWAY_TOKEN when prompted
|
| 361 |
+
```
|
| 362 |
+
|
| 363 |
+
---
|
| 364 |
+
|
| 365 |
+
## ποΈ Architecture
|
| 366 |
+
|
| 367 |
+
```
|
| 368 |
+
HuggingClaw/
|
| 369 |
+
βββ Dockerfile # Multi-stage build with pre-built OpenClaw image
|
| 370 |
+
βββ start.sh # Config generator + validation + orchestrator
|
| 371 |
+
βββ keep-alive.sh # Self-ping to prevent HF sleep
|
| 372 |
+
βββ workspace-sync.py # Smart sync via huggingface_hub (with git fallback)
|
| 373 |
+
βββ health-server.js # Health endpoint (/health)
|
| 374 |
+
βββ dns-fix.js # DNS override for HF network restrictions
|
| 375 |
+
βββ .env.example # Complete configuration reference
|
| 376 |
+
βββ README.md # You are here
|
| 377 |
+
```
|
| 378 |
+
|
| 379 |
+
**Startup flow:**
|
| 380 |
+
1. Validate secrets β fail fast with clear errors
|
| 381 |
+
2. Validate HF token β warn if expired
|
| 382 |
+
3. Auto-create backup dataset if missing
|
| 383 |
+
4. Restore workspace from HF Dataset
|
| 384 |
+
5. Generate `openclaw.json` config from env vars
|
| 385 |
+
6. Print startup summary
|
| 386 |
+
7. Start background services (keep-alive, auto-sync)
|
| 387 |
+
8. Launch OpenClaw gateway
|
| 388 |
+
9. On SIGTERM β save workspace β exit cleanly
|
| 389 |
+
|
| 390 |
+
---
|
| 391 |
+
|
| 392 |
+
## π Troubleshooting
|
| 393 |
+
|
| 394 |
+
**Missing secrets** β Check **Settings β Secrets** for `LLM_API_KEY` and `GATEWAY_TOKEN`
|
| 395 |
+
|
| 396 |
+
**Telegram not working** β Verify bot token is valid, check logs for `π± Enabling Telegram`
|
| 397 |
+
|
| 398 |
+
**Workspace not restoring** β Check `HF_USERNAME` and `HF_TOKEN` are set, token has write access
|
| 399 |
+
|
| 400 |
+
**Space sleeping** β Check logs for `π Keep-alive started`. If missing, `SPACE_HOST` might not be set
|
| 401 |
+
|
| 402 |
+
**"Proxy headers detected" or auth errors** β Set `TRUSTED_PROXIES` with the IPs from your Space logs (`remote=x.x.x.x`)
|
| 403 |
+
|
| 404 |
+
**Control UI blocked** β Set `ALLOWED_ORIGINS=https://your-space.hf.space` or check logs for origin errors
|
| 405 |
+
|
| 406 |
+
**Version issues** β Pin with `OPENCLAW_VERSION=2026.3.24` in secrets
|
| 407 |
+
|
| 408 |
+
---
|
| 409 |
+
|
| 410 |
+
## π Links
|
| 411 |
+
|
| 412 |
+
- [OpenClaw Docs](https://docs.openclaw.ai) Β· [OpenClaw GitHub](https://github.com/openclaw/openclaw) Β· [HF Spaces Docs](https://huggingface.co/docs/hub/spaces)
|
| 413 |
+
|
| 414 |
+
---
|
| 415 |
+
|
| 416 |
+
## π€ Contributing
|
| 417 |
+
|
| 418 |
+
Contributions welcome! See [CONTRIBUTING.md](CONTRIBUTING.md) for guidelines.
|
| 419 |
+
|
| 420 |
+
## π License
|
| 421 |
+
|
| 422 |
+
MIT β see [LICENSE](LICENSE) for details.
|
| 423 |
+
|
| 424 |
+
---
|
| 425 |
+
|
| 426 |
+
Made with β€οΈ by [@somratpro](https://github.com/somratpro) for the [OpenClaw](https://github.com/openclaw/openclaw) community
|
SECURITY.md
ADDED
|
@@ -0,0 +1,28 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Security Policy
|
| 2 |
+
|
| 3 |
+
## Reporting a Vulnerability
|
| 4 |
+
|
| 5 |
+
If you discover a security vulnerability, please report it responsibly:
|
| 6 |
+
|
| 7 |
+
1. **Do NOT open a public issue**
|
| 8 |
+
2. Email the maintainer or open a private security advisory on GitHub
|
| 9 |
+
3. Include steps to reproduce if possible
|
| 10 |
+
|
| 11 |
+
We'll respond within 48 hours and work on a fix.
|
| 12 |
+
|
| 13 |
+
## Security Best Practices
|
| 14 |
+
|
| 15 |
+
When deploying HuggingClaw:
|
| 16 |
+
|
| 17 |
+
- **Set your Space to Private** β prevents unauthorized access to your gateway
|
| 18 |
+
- **Use a strong `GATEWAY_TOKEN`** β generate with `openssl rand -hex 32`
|
| 19 |
+
- **Keep your HF token scoped** β use fine-grained tokens with minimum permissions
|
| 20 |
+
- **Don't commit `.env` files** β the `.gitignore` already excludes them
|
| 21 |
+
- **Use `TELEGRAM_USER_ID`** β restricts bot access to your account only
|
| 22 |
+
- **Review logs regularly** β check for unauthorized access attempts
|
| 23 |
+
|
| 24 |
+
## Supported Versions
|
| 25 |
+
|
| 26 |
+
| Version | Supported |
|
| 27 |
+
|---------|-----------|
|
| 28 |
+
| 1.0.x | ✅ |
|
|
dns-fix.js
ADDED
|
@@ -0,0 +1,19 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
// Fix HF Spaces DNS: internal resolver can't resolve discord.com / api.telegram.org
// Override dns.lookup (used by http/https) to resolve via Google/Cloudflare DNS,
// falling back to the system resolver whenever the public resolvers fail.
const dns = require('dns');
const { Resolver } = dns;
const resolver = new Resolver();
resolver.setServers(['8.8.8.8', '1.1.1.1']);

const origLookup = dns.lookup;
// Same call shapes as dns.lookup(hostname[, options], callback):
// `options` may be omitted, an integer family (4 or 6), or an options object.
dns.lookup = function(hostname, options, callback) {
  if (typeof options === 'function') { callback = options; options = { family: 0 }; }
  // Normalize the legacy numeric form so the checks below see an object.
  if (typeof options === 'number') { options = { family: options }; }
  // We only resolve A records here; honor explicit IPv6 requests via the original lookup.
  if (options && options.family === 6) {
    return origLookup.call(dns, hostname, options, callback);
  }
  resolver.resolve4(hostname, (err, addresses) => {
    // Any error or empty answer -> fall back to the default (system) resolver.
    if (err || !addresses || !addresses.length) return origLookup.call(dns, hostname, options, callback);
    if (options && options.all) {
      // `all: true` callers expect an array of {address, family} entries.
      callback(null, addresses.map(a => ({ address: a, family: 4 })));
    } else {
      callback(null, addresses[0], 4);
    }
  });
};
|
health-server.js
ADDED
|
@@ -0,0 +1,27 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
// Lightweight health endpoint on port 7861.
// OpenClaw itself serves port 7860; this little server runs next to it and
// answers keep-alive pings and external uptime monitors with a 200 + JSON body.
const http = require('http');

const PORT = process.env.HEALTH_PORT || 7861;
const startTime = Date.now();

// Assemble the health JSON for "right now".
function healthPayload() {
  const seconds = Math.floor((Date.now() - startTime) / 1000);
  const hours = Math.floor(seconds / 3600);
  const minutes = Math.floor((seconds % 3600) / 60);
  return {
    status: 'ok',
    uptime: seconds,
    uptimeHuman: `${hours}h ${minutes}m`,
    timestamp: new Date().toISOString()
  };
}

const server = http.createServer((req, res) => {
  // Only the root and /health are served; everything else is a 404.
  if (req.url !== '/health' && req.url !== '/') {
    res.writeHead(404);
    res.end();
    return;
  }
  res.writeHead(200, { 'Content-Type': 'application/json' });
  res.end(JSON.stringify(healthPayload()));
});

server.listen(PORT, '0.0.0.0', () => {
  console.log(`π₯ Health server listening on port ${PORT}`);
});
|
keep-alive.sh
ADDED
|
@@ -0,0 +1,34 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/bin/bash
# Self-ping keep-alive for HF Spaces.
#
# HF Spaces puts a Space to sleep after 48h without a single HTTP request;
# periodically hitting our own public URL counts as activity and keeps it awake.
# HF injects SPACE_HOST automatically (e.g. "username-spacename.hf.space").
# This script is started in the background next to the gateway process.

interval="${KEEP_ALIVE_INTERVAL:-300}"   # seconds between pings (default: 5 min)

# Explicit opt-out via KEEP_ALIVE_INTERVAL=0.
[ "$interval" = "0" ] && { echo "βΈοΈ Keep-alive: disabled (KEEP_ALIVE_INTERVAL=0)"; exit 0; }

# No SPACE_HOST means we are not on HF Spaces, so there is nothing to ping.
[ -z "$SPACE_HOST" ] && { echo "βΈοΈ Keep-alive: SPACE_HOST not set (not on HF Spaces?), skipping."; exit 0; }

# Any HTTP response β even a 404 β counts as activity.
target="https://${SPACE_HOST}"

echo "π Keep-alive started: pinging ${target} every ${interval}s"

while :; do
  sleep "$interval"
  # curl reports 000 when no HTTP exchange happened at all (DNS/connect/timeout).
  code=$(curl -s -o /dev/null -w "%{http_code}" --max-time 10 "$target" 2>/dev/null)
  if [ "$code" = "000" ]; then
    echo "π Keep-alive: ping failed (network error), retrying next cycle..."
  else
    echo "π Keep-alive: OK"
  fi
done
|
start.sh
ADDED
|
@@ -0,0 +1,330 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/bin/bash
# Abort immediately if any setup command fails.
set -e

# ββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ
# HuggingClaw β OpenClaw Gateway for HF Spaces
# ββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ

# ββ Startup Banner ββ
# OPENCLAW_VERSION lets users pin a CLI version via a Space secret.
OPENCLAW_VERSION="${OPENCLAW_VERSION:-latest}"
echo ""
echo " ββββββββββββββββββββββββββββββββββββββββββββ"
echo " β π¦ HuggingClaw Gateway β"
echo " ββββββββββββββββββββββββββββββββββββββββββββ"
echo ""

# ββ Validate required secrets ββ
# Accumulate every missing-secret message so the user sees all problems at once
# instead of fixing them one reboot at a time.
ERRORS=""
if [ -z "$LLM_API_KEY" ]; then
  ERRORS="${ERRORS} β LLM_API_KEY is not set\n"
fi
if [ -z "$LLM_MODEL" ]; then
  ERRORS="${ERRORS} β LLM_MODEL is not set (e.g. google/gemini-2.5-flash, anthropic/claude-sonnet-4-5, openai/gpt-4)\n"
fi
if [ -z "$GATEWAY_TOKEN" ]; then
  ERRORS="${ERRORS} β GATEWAY_TOKEN is not set (generate: openssl rand -hex 32)\n"
fi
if [ -n "$ERRORS" ]; then
  echo "Missing required secrets:"
  # -e so the \n separators collected above render as real newlines.
  echo -e "$ERRORS"
  echo "Add them in HF Spaces β Settings β Secrets"
  exit 1
fi

# ββ Set LLM env based on model name ββ

# Auto-correct Gemini models to use google/ prefix if anthropic/ was mistakenly used
if [[ "$LLM_MODEL" == "anthropic/gemini"* ]]; then
  LLM_MODEL=$(echo "$LLM_MODEL" | sed 's/^anthropic\//google\//')
  echo "β οΈ Corrected model from anthropic/gemini* to google/gemini*"
fi

# Extract provider prefix from model name (e.g. "google/gemini-2.5-flash" β "google")
LLM_PROVIDER=$(echo "$LLM_MODEL" | cut -d'/' -f1)

# Map provider prefix to the correct API key environment variable
# Based on OpenClaw provider system: /usr/local/lib/node_modules/openclaw/docs/concepts/model-providers.md
# Note: OpenClaw normalizes some prefixes (z-ai β zai, z.ai β zai, etc.)
# Only the single LLM_API_KEY secret is exported, under the name the chosen
# provider's SDK expects.
case "$LLM_PROVIDER" in
  # ββ Core Providers ββ
  anthropic) export ANTHROPIC_API_KEY="$LLM_API_KEY" ;;
  openai|openai-codex) export OPENAI_API_KEY="$LLM_API_KEY" ;;
  google|google-vertex) export GEMINI_API_KEY="$LLM_API_KEY" ;;
  deepseek) export DEEPSEEK_API_KEY="$LLM_API_KEY" ;;
  # ββ OpenCode Providers ββ
  opencode) export OPENCODE_API_KEY="$LLM_API_KEY" ;;
  opencode-go) export OPENCODE_API_KEY="$LLM_API_KEY" ;;
  # ββ Gateway/Router Providers ββ
  openrouter) export OPENROUTER_API_KEY="$LLM_API_KEY" ;;
  kilocode) export KILOCODE_API_KEY="$LLM_API_KEY" ;;
  vercel-ai-gateway) export AI_GATEWAY_API_KEY="$LLM_API_KEY" ;;
  # ββ Chinese/Asian Providers ββ
  # Note: "." is literal in case patterns, so z.ai matches exactly.
  zai|z-ai|z.ai|zhipu) export ZAI_API_KEY="$LLM_API_KEY" ;;
  moonshot) export MOONSHOT_API_KEY="$LLM_API_KEY" ;;
  kimi-coding) export KIMI_API_KEY="$LLM_API_KEY" ;;
  minimax) export MINIMAX_API_KEY="$LLM_API_KEY" ;;
  qwen|modelstudio) export MODELSTUDIO_API_KEY="$LLM_API_KEY" ;;
  xiaomi) export XIAOMI_API_KEY="$LLM_API_KEY" ;;
  volcengine|volcengine-plan) export VOLCANO_ENGINE_API_KEY="$LLM_API_KEY" ;;
  byteplus|byteplus-plan) export BYTEPLUS_API_KEY="$LLM_API_KEY" ;;
  qianfan) export QIANFAN_API_KEY="$LLM_API_KEY" ;;
  # ββ Western Providers ββ
  mistral|mistralai) export MISTRAL_API_KEY="$LLM_API_KEY" ;;
  xai|x-ai) export XAI_API_KEY="$LLM_API_KEY" ;;
  nvidia) export NVIDIA_API_KEY="$LLM_API_KEY" ;;
  cohere) export COHERE_API_KEY="$LLM_API_KEY" ;;
  groq) export GROQ_API_KEY="$LLM_API_KEY" ;;
  together) export TOGETHER_API_KEY="$LLM_API_KEY" ;;
  huggingface) export HUGGINGFACE_HUB_TOKEN="$LLM_API_KEY" ;;
  cerebras) export CEREBRAS_API_KEY="$LLM_API_KEY" ;;
  venice) export VENICE_API_KEY="$LLM_API_KEY" ;;
  synthetic) export SYNTHETIC_API_KEY="$LLM_API_KEY" ;;
  github-copilot) export COPILOT_GITHUB_TOKEN="$LLM_API_KEY" ;;
  # ββ Fallback: Anthropic (default) ββ
  # Unknown prefixes still export *some* key rather than failing silently.
  *)
    export ANTHROPIC_API_KEY="$LLM_API_KEY"
    ;;
esac
|
| 88 |
+
|
| 89 |
+
# ββ Setup directories ββ
mkdir -p /home/node/.openclaw/agents/main/sessions
mkdir -p /home/node/.openclaw/credentials
mkdir -p /home/node/.openclaw/workspace
# Lock down config/credential dirs β they will contain API tokens.
chmod 700 /home/node/.openclaw
chmod 700 /home/node/.openclaw/credentials

# ββ Validate HF token (if provided) ββ
if [ -n "$HF_TOKEN" ]; then
  echo "π Validating HF token..."
  # NOTE(review): this probes the repos/create endpoint with a GET purely to see
  # whether auth is rejected (401); /api/whoami-v2 may be a cleaner probe β confirm.
  # "000" is the sentinel for "curl itself failed" (timeout / no network).
  HF_AUTH_STATUS=$(curl -s -o /dev/null -w "%{http_code}" -H "Authorization: Bearer $HF_TOKEN" https://huggingface.co/api/repos/create --max-time 10 2>/dev/null || echo "000")
  if [ "$HF_AUTH_STATUS" = "401" ]; then
    echo " β οΈ HF token is invalid or expired! Workspace backup will not work."
    echo " Get a new token: https://huggingface.co/settings/tokens"
  else
    echo " β
 HF token is valid"
  fi
fi

# ββ Auto-create + Restore workspace from HF Dataset ββ
if [ -n "$HF_USERNAME" ] && [ -n "$HF_TOKEN" ]; then
  BACKUP_DATASET="${BACKUP_DATASET_NAME:-huggingclaw-backup}"
  # Token is embedded in the git remote URL and therefore ends up in
  # workspace/.git/config β acceptable here because the parent dir is chmod 700.
  BACKUP_URL="https://${HF_USERNAME}:${HF_TOKEN}@huggingface.co/datasets/${HF_USERNAME}/${BACKUP_DATASET}"

  # Auto-create the dataset if it doesn't exist
  echo "π¦ Checking HF Dataset: ${HF_USERNAME}/${BACKUP_DATASET}..."
  DATASET_CHECK=$(curl -s -o /dev/null -w "%{http_code}" \
    -H "Authorization: Bearer $HF_TOKEN" \
    "https://huggingface.co/api/datasets/${HF_USERNAME}/${BACKUP_DATASET}" \
    --max-time 10 2>/dev/null || echo "000")

  if [ "$DATASET_CHECK" = "404" ]; then
    echo " π Dataset not found, creating ${HF_USERNAME}/${BACKUP_DATASET}..."
    # -w "\n%{http_code}" appends the HTTP status as the last line of the output,
    # which is then peeled off with tail -1 below.
    CREATE_RESULT=$(curl -s -w "\n%{http_code}" \
      -X POST "https://huggingface.co/api/repos/create" \
      -H "Authorization: Bearer $HF_TOKEN" \
      -H "Content-Type: application/json" \
      -d "{\"type\":\"dataset\",\"name\":\"${BACKUP_DATASET}\",\"private\":true}" \
      --max-time 15 2>/dev/null || echo "error")
    CREATE_STATUS=$(echo "$CREATE_RESULT" | tail -1)
    if [ "$CREATE_STATUS" = "200" ] || [ "$CREATE_STATUS" = "201" ]; then
      echo " β
 Dataset created: ${HF_USERNAME}/${BACKUP_DATASET} (private)"
    else
      echo " β οΈ Could not create dataset (HTTP $CREATE_STATUS). Create it manually:"
      echo " https://huggingface.co/datasets/create"
    fi
  elif [ "$DATASET_CHECK" = "200" ]; then
    echo " β
 Dataset exists"
  else
    echo " β οΈ Could not check dataset (HTTP $DATASET_CHECK)"
  fi

  # Restore workspace
  echo "π¦ Restoring workspace..."
  WORKSPACE="/home/node/.openclaw/workspace"
  GIT_USER_EMAIL="${WORKSPACE_GIT_USER:-openclaw@example.com}"
  GIT_USER_NAME="${WORKSPACE_GIT_NAME:-OpenClaw Bot}"

  cd "$WORKSPACE"
  # (Re)point origin at the backup dataset every boot so a changed token or
  # dataset name takes effect without manual cleanup.
  if [ ! -d ".git" ]; then
    git init -q
    git remote add origin "$BACKUP_URL"
  else
    git remote set-url origin "$BACKUP_URL"
  fi

  git config user.email "$GIT_USER_EMAIL"
  git config user.name "$GIT_USER_NAME"

  # Hard-reset to the remote snapshot; a brand-new (empty) dataset simply
  # fails the fetch and we start fresh. 2>/dev/null keeps git noise out of logs.
  if git fetch origin main 2>/dev/null; then
    git reset --hard origin/main 2>/dev/null && echo " β
 Workspace restored!"
  else
    echo " β οΈ No remote data yet, starting fresh."
  fi
  cd /
fi
|
| 165 |
+
|
| 166 |
+
# ββ Build config ββ
|
| 167 |
+
CONFIG_JSON=$(cat <<'CONFIGEOF'
|
| 168 |
+
{
|
| 169 |
+
"gateway": {
|
| 170 |
+
"mode": "local",
|
| 171 |
+
"port": 7860,
|
| 172 |
+
"bind": "lan",
|
| 173 |
+
"auth": {
|
| 174 |
+
"token": ""
|
| 175 |
+
},
|
| 176 |
+
"controlUi": {
|
| 177 |
+
"allowInsecureAuth": true
|
| 178 |
+
},
|
| 179 |
+
"trustedProxies": ["10.0.0.0/8", "172.16.0.0/12", "192.168.0.0/16"]
|
| 180 |
+
},
|
| 181 |
+
"channels": {},
|
| 182 |
+
"plugins": {
|
| 183 |
+
"entries": {}
|
| 184 |
+
}
|
| 185 |
+
}
|
| 186 |
+
CONFIGEOF
|
| 187 |
+
)
|
| 188 |
+
|
| 189 |
+
# Gateway token
|
| 190 |
+
CONFIG_JSON=$(echo "$CONFIG_JSON" | jq ".gateway.auth.token = \"$GATEWAY_TOKEN\"")
|
| 191 |
+
|
| 192 |
+
# Model configuration at top level
|
| 193 |
+
CONFIG_JSON=$(echo "$CONFIG_JSON" | jq ".agents.defaults.model = \"$LLM_MODEL\"")
|
| 194 |
+
|
| 195 |
+
# Control UI origin (allow HF Space URL for web UI access)
|
| 196 |
+
if [ -n "$SPACE_HOST" ]; then
|
| 197 |
+
CONFIG_JSON=$(echo "$CONFIG_JSON" | jq ".gateway.controlUi.allowedOrigins = [\"https://${SPACE_HOST}\"]")
|
| 198 |
+
fi
|
| 199 |
+
|
| 200 |
+
# Disable device auth (pairing) for headless Docker β token-only auth
|
| 201 |
+
CONFIG_JSON=$(echo "$CONFIG_JSON" | jq ".gateway.controlUi.dangerouslyDisableDeviceAuth = true")
|
| 202 |
+
|
| 203 |
+
# Password auth (optional β simpler alternative to token for casual users)
|
| 204 |
+
if [ -n "$OPENCLAW_PASSWORD" ]; then
|
| 205 |
+
CONFIG_JSON=$(echo "$CONFIG_JSON" | jq ".gateway.auth.mode = \"password\" | .gateway.auth.password = \"$OPENCLAW_PASSWORD\"")
|
| 206 |
+
fi
|
| 207 |
+
|
| 208 |
+
# Trusted proxies (optional β fixes "Proxy headers detected from untrusted address" on HF Spaces)
|
| 209 |
+
# Set TRUSTED_PROXIES as comma-separated IPs, e.g. "10.20.31.87,10.20.26.157"
|
| 210 |
+
if [ -n "$TRUSTED_PROXIES" ]; then
|
| 211 |
+
PROXIES_JSON=$(echo "$TRUSTED_PROXIES" | tr ',' '\n' | sed 's/^ *//;s/ *$//' | jq -R . | jq -s .)
|
| 212 |
+
CONFIG_JSON=$(echo "$CONFIG_JSON" | jq ".gateway.trustedProxies = $PROXIES_JSON")
|
| 213 |
+
fi
|
| 214 |
+
|
| 215 |
+
# Allowed origins (optional β lock down Control UI to specific URLs)
|
| 216 |
+
# Set ALLOWED_ORIGINS as comma-separated URLs, e.g. "https://your-space.hf.space"
|
| 217 |
+
if [ -n "$ALLOWED_ORIGINS" ]; then
|
| 218 |
+
ORIGINS_JSON=$(echo "$ALLOWED_ORIGINS" | tr ',' '\n' | sed 's/^ *//;s/ *$//' | jq -R . | jq -s .)
|
| 219 |
+
CONFIG_JSON=$(echo "$CONFIG_JSON" | jq ".gateway.controlUi.allowedOrigins = $ORIGINS_JSON")
|
| 220 |
+
fi
|
| 221 |
+
|
| 222 |
+
# Telegram (supports multiple user IDs, comma-separated)
|
| 223 |
+
if [ -n "$TELEGRAM_BOT_TOKEN" ]; then
|
| 224 |
+
CONFIG_JSON=$(echo "$CONFIG_JSON" | jq '.plugins.entries.telegram = {"enabled": true}')
|
| 225 |
+
export TELEGRAM_BOT_TOKEN="$TELEGRAM_BOT_TOKEN"
|
| 226 |
+
|
| 227 |
+
if [ -n "$TELEGRAM_USER_IDS" ]; then
|
| 228 |
+
# Convert comma-separated IDs to JSON array
|
| 229 |
+
IDS_JSON=$(echo "$TELEGRAM_USER_IDS" | tr ',' '\n' | sed 's/^ *//;s/ *$//' | jq -R . | jq -s .)
|
| 230 |
+
CONFIG_JSON=$(echo "$CONFIG_JSON" | jq ".channels.telegram = {\"dmPolicy\": \"allowlist\", \"allowFrom\": $IDS_JSON}")
|
| 231 |
+
elif [ -n "$TELEGRAM_USER_ID" ]; then
|
| 232 |
+
# Single user (backward compatible)
|
| 233 |
+
CONFIG_JSON=$(echo "$CONFIG_JSON" | jq ".channels.telegram = {\"dmPolicy\": \"allowlist\", \"allowFrom\": [\"$TELEGRAM_USER_ID\"]}")
|
| 234 |
+
fi
|
| 235 |
+
fi
|
| 236 |
+
|
| 237 |
+
# Write config
echo "$CONFIG_JSON" > "/home/node/.openclaw/openclaw.json"
# 600: the file embeds the gateway token (and optionally a password).
chmod 600 /home/node/.openclaw/openclaw.json

# ββ Startup Summary ββ
echo ""
echo " ββββββββββββββββββββββββββββββββββββββββββββ"
echo " β π Configuration Summary β"
echo " ββββββββββββββββββββββββββββββββββββββββββββ€"
printf " β %-40s β\n" "Model: $LLM_MODEL"
if [ -n "$TELEGRAM_BOT_TOKEN" ]; then
  printf " β %-40s β\n" "Telegram: β
 enabled"
else
  printf " β %-40s β\n" "Telegram: β not configured"
fi
if [ -n "$HF_USERNAME" ] && [ -n "$HF_TOKEN" ]; then
  printf " β %-40s β\n" "Backup: β
 ${HF_USERNAME}/${BACKUP_DATASET:-huggingclaw-backup}"
else
  printf " β %-40s β\n" "Backup: β not configured"
fi
if [ -n "$OPENCLAW_PASSWORD" ]; then
  printf " β %-40s β\n" "Auth: π password"
else
  printf " β %-40s β\n" "Auth: π token"
fi
if [ -n "$SPACE_HOST" ]; then
  printf " β %-40s β\n" "Keep-alive: β
 every ${KEEP_ALIVE_INTERVAL:-300}s"
  printf " β %-40s β\n" "Control UI: https://${SPACE_HOST}"
else
  printf " β %-40s β\n" "Keep-alive: βΈοΈ local mode"
fi
SYNC_STATUS="β disabled"
if [ -n "$HF_USERNAME" ] && [ -n "$HF_TOKEN" ]; then
  SYNC_STATUS="β
 every ${SYNC_INTERVAL:-600}s"
fi
printf " β %-40s β\n" "Auto-sync: $SYNC_STATUS"
echo " ββββββββββββββββββββββββββββββββββββββββββββ"
echo ""

# ββ Trap SIGTERM for graceful shutdown ββ
# HF Spaces sends SIGTERM on restart/rebuild; push the workspace before dying.
graceful_shutdown() {
  echo ""
  echo "π Shutting down gracefully..."

  # Commit any unsaved workspace changes
  if [ -d "/home/node/.openclaw/workspace/.git" ]; then
    echo "πΎ Saving workspace before exit..."
    cd /home/node/.openclaw/workspace
    git add -A 2>/dev/null
    # Only commit/push when something is actually staged.
    if ! git diff --cached --quiet 2>/dev/null; then
      TIMESTAMP=$(date -u +%Y-%m-%dT%H:%M:%SZ)
      git commit -m "Shutdown sync ${TIMESTAMP}" 2>/dev/null
      git push origin main 2>/dev/null && echo " β
 Workspace saved!" || echo " β οΈ Push failed"
    else
      echo " β
 No unsaved changes"
    fi
  fi

  # Kill background processes
  # jobs -p lists the PIDs of the health server, keep-alive, sync, and gateway.
  kill $(jobs -p) 2>/dev/null
  echo "π Goodbye!"
  exit 0
}
trap graceful_shutdown SIGTERM SIGINT

# ββ Start background services ββ
node /home/node/app/health-server.js &
/home/node/app/keep-alive.sh &

python3 /home/node/app/workspace-sync.py &

# ββ Launch gateway ββ
echo "π Launching OpenClaw gateway on port 7860..."
echo ""
# Set model via environment for the gateway
export LLM_MODEL="$LLM_MODEL"


# Run in the background (with a log tee) so this shell stays alive to service traps.
openclaw gateway run --port 7860 --bind lan --verbose 2>&1 | tee -a /home/node/.openclaw/gateway.log &
GATEWAY_PID=$!

# Wait a moment for startup errors
sleep 3
# NOTE(review): GATEWAY_PID is the pid of the pipeline's last command (tee),
# so this probe detects the pipeline dying, not the gateway process itself β confirm.
if ! kill -0 $GATEWAY_PID 2>/dev/null; then
  echo ""
  echo "β Gateway failed to start. Last 30 lines of log:"
  echo "ββββββββββββββββββββββββββββββββββββββββββββ"
  tail -30 /home/node/.openclaw/gateway.log
  exit 1
fi

# Wait for gateway (allows trap to fire)
wait $GATEWAY_PID
|
workspace-sync.py
ADDED
|
@@ -0,0 +1,149 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/usr/bin/env python3
"""
HuggingClaw Workspace Sync — HuggingFace Hub based backup
Uses huggingface_hub Python library instead of git for more reliable
HF Dataset operations (handles auth, LFS, retries automatically).

Falls back to git-based sync if HF_USERNAME or HF_TOKEN are not set.
"""

import os
import sys
import time
import signal
import subprocess
from pathlib import Path

# Directory OpenClaw uses as its working tree; this is what gets backed up.
WORKSPACE = Path("/home/node/.openclaw/workspace")
# Seconds between sync attempts; SYNC_INTERVAL overrides the 600s default.
INTERVAL = int(os.environ.get("SYNC_INTERVAL", "600"))
# HF credentials — when both HF_TOKEN and HF_USERNAME are set, the hub-based
# sync path is used; otherwise the script falls back to plain git.
HF_TOKEN = os.environ.get("HF_TOKEN", "")
HF_USERNAME = os.environ.get("HF_USERNAME", "")
# Name of the (private) HF Dataset repo that receives backups.
BACKUP_DATASET = os.environ.get("BACKUP_DATASET_NAME", "huggingclaw-backup")

# Cooperative shutdown flag: flipped to False by the signal handler below.
running = True
|
| 24 |
+
|
| 25 |
+
def signal_handler(sig, frame):
    """Flip the module-level `running` flag so the sync loop winds down.

    The handler deliberately does nothing else: actual cleanup happens
    in the loop, which checks `running` between sleeps.
    """
    global running
    running = False


# Exit cleanly on both container stop (SIGTERM) and Ctrl-C (SIGINT).
for _signum in (signal.SIGTERM, signal.SIGINT):
    signal.signal(_signum, signal_handler)
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
def has_changes():
    """Report whether the workspace git index has staged differences.

    Intentionally stages everything first (`git add -A`) so that a
    subsequent commit picks up exactly the changes detected here.
    Returns False on any error (e.g. git missing or not a repo).
    """
    try:
        subprocess.run(["git", "add", "-A"], cwd=WORKSPACE, capture_output=True)
        diff = subprocess.run(
            ["git", "diff", "--cached", "--quiet"],
            cwd=WORKSPACE,
            capture_output=True,
        )
    except Exception:
        return False
    # `git diff --quiet` exits non-zero exactly when differences exist.
    return diff.returncode != 0
|
| 44 |
+
|
| 45 |
+
|
| 46 |
+
def sync_with_hf_hub():
    """Sync workspace using huggingface_hub library.

    Uploads the whole WORKSPACE folder to the private HF Dataset
    `{HF_USERNAME}/{BACKUP_DATASET}`, creating the dataset on first use.

    Returns:
        True on successful upload, False on any failure (missing library,
        repo creation failure, or upload error) so the caller can fall
        back to git-based sync.
    """
    try:
        # Imported lazily so the script still runs when the library is
        # absent — ImportError is caught below and triggers git fallback.
        from huggingface_hub import HfApi, upload_folder

        api = HfApi(token=HF_TOKEN)
        repo_id = f"{HF_USERNAME}/{BACKUP_DATASET}"

        # Ensure dataset exists
        try:
            api.repo_info(repo_id=repo_id, repo_type="dataset")
        except Exception:
            # repo_info raising is treated as "repo missing" — create it.
            # NOTE(review): this also swallows auth/network errors; in that
            # case create_repo below will fail and we return False.
            print(f" 📦 Creating dataset {repo_id}...")
            try:
                api.create_repo(repo_id=repo_id, repo_type="dataset", private=True)
                print(f" ✅ Dataset created: {repo_id}")
            except Exception as e:
                print(f" ⚠️ Could not create dataset: {e}")
                return False

        # Upload workspace (the library handles auth, LFS and retries).
        upload_folder(
            folder_path=str(WORKSPACE),
            repo_id=repo_id,
            repo_type="dataset",
            token=HF_TOKEN,
            commit_message=f"Auto-sync {time.strftime('%Y-%m-%dT%H:%M:%SZ', time.gmtime())}",
            # The local .git directory is backup machinery, not user data.
            ignore_patterns=[".git/*", ".git"],
        )
        return True

    except ImportError:
        print(" ⚠️ huggingface_hub not installed, falling back to git")
        return False
    except Exception as e:
        print(f" ⚠️ HF Hub sync failed: {e}")
        return False
|
| 83 |
+
|
| 84 |
+
|
| 85 |
+
def sync_with_git():
    """Fallback: commit and push the workspace with plain git.

    Stages everything, commits with a UTC timestamp message, and pushes
    to `origin main`.  Returns True only when the push succeeds; any
    exception (git missing, no repo, etc.) yields False.
    """

    def _git(*args):
        # Output is swallowed on purpose — callers only need pass/fail.
        return subprocess.run(["git", *args], cwd=WORKSPACE, capture_output=True)

    try:
        stamp = time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime())
        _git("add", "-A")
        _git("commit", "-m", f"Auto-sync {stamp}")
        return _git("push", "origin", "main").returncode == 0
    except Exception:
        return False
|
| 101 |
+
|
| 102 |
+
|
| 103 |
+
def _interruptible_sleep(seconds):
    """Sleep up to `seconds`, waking early once `running` goes False.

    A plain time.sleep(INTERVAL) would NOT be cut short by SIGTERM/SIGINT:
    the handler only sets a flag and returns, so per PEP 475 time.sleep
    resumes with the remaining timeout.  Sleeping in ~1s slices bounds
    shutdown latency at about one second instead of up to SYNC_INTERVAL
    (600s by default).
    """
    deadline = time.monotonic() + seconds
    while running:
        remaining = deadline - time.monotonic()
        if remaining <= 0:
            break
        time.sleep(min(1.0, remaining))


def main():
    """Periodic workspace backup loop; runs until SIGTERM/SIGINT.

    Prefers huggingface_hub uploads when HF_TOKEN and HF_USERNAME are
    set; otherwise (or when the hub sync fails) falls back to plain
    `git commit && git push`.  Exits immediately when neither backup
    path is configured.
    """
    # Give start.sh time to clone/initialize the workspace first.
    _interruptible_sleep(30)
    if not running:
        return

    if not WORKSPACE.exists():
        print("🔄 Workspace sync: workspace not found, exiting.")
        return

    use_hf_hub = bool(HF_TOKEN and HF_USERNAME)

    if use_hf_hub:
        print(f"🔄 Workspace sync started (huggingface_hub): every {INTERVAL}s → {HF_USERNAME}/{BACKUP_DATASET}")
    else:
        git_dir = WORKSPACE / ".git"
        if not git_dir.exists():
            print("🔄 Workspace sync: no git repo and no HF credentials, skipping.")
            return
        print(f"🔄 Workspace sync started (git): every {INTERVAL}s")

    while running:
        _interruptible_sleep(INTERVAL)
        if not running:
            break

        if not has_changes():
            continue

        ts = time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime())

        if use_hf_hub:
            if sync_with_hf_hub():
                print(f"🔄 Workspace sync (hf_hub): pushed changes ({ts})")
            # Fallback to git when the hub path fails.
            elif sync_with_git():
                print(f"🔄 Workspace sync (git fallback): pushed changes ({ts})")
            else:
                print(f"🔄 Workspace sync: failed ({ts}), will retry")
        else:
            if sync_with_git():
                print(f"🔄 Workspace sync (git): pushed changes ({ts})")
            else:
                print(f"🔄 Workspace sync: push failed ({ts}), will retry")


if __name__ == "__main__":
    main()
|