Arshit Malik committed on
Commit
edbbeec
·
1 Parent(s): ecb7a1f

fix: python json config (no heredoc), fallback http server on 8080

Browse files
Files changed (2) hide show
  1. start.sh +60 -56
  2. sync_hub.py +17 -26
start.sh CHANGED
@@ -1,22 +1,22 @@
1
  #!/bin/bash
2
 
3
- echo "[boot] Configuring Kaggle credentials..."
4
  if [ -n "$KAGGLE_USERNAME" ] && [ -n "$KAGGLE_KEY" ]; then
5
  mkdir -p ~/.kaggle
6
  printf '{"username":"%s","key":"%s"}' "$KAGGLE_USERNAME" "$KAGGLE_KEY" > ~/.kaggle/kaggle.json
7
  chmod 600 ~/.kaggle/kaggle.json
8
  fi
9
 
10
- echo "[boot] Configuring YouTube client secrets..."
11
  if [ -n "$YOUTUBE_CLIENT_ID" ] && [ -n "$YOUTUBE_CLIENT_SECRET" ]; then
12
  python3 -c "
13
  import json,os
14
  d={'installed':{'client_id':os.environ['YOUTUBE_CLIENT_ID'],'project_id':'yt-ai-bot','auth_uri':'https://accounts.google.com/o/oauth2/auth','token_uri':'https://oauth2.googleapis.com/token','auth_provider_x509_cert_url':'https://www.googleapis.com/oauth2/v1/certs','client_secret':os.environ['YOUTUBE_CLIENT_SECRET'],'redirect_uris':['http://localhost']}}
15
- open('/app/client_secrets.json','w').write(json.dumps(d))
16
- print('[boot] client_secrets.json written')"
17
  fi
18
 
19
- echo "[boot] Pulling state from HF Dataset..."
20
  python3 -c "
21
  import sys; sys.path.insert(0,'/app')
22
  try:
@@ -24,71 +24,75 @@ try:
24
  from pathlib import Path
25
  pull_state(Path('/app'))
26
  except Exception as e:
27
- print(f'[boot] Hub pull skipped: {e}')
28
  "
29
 
30
- echo "[boot] Writing OpenClaw config..."
31
- mkdir -p ~/.openclaw
32
- GATEWAY_PASS="${OPENCLAW_PASSWORD:-arshit2025}"
33
- cat > ~/.openclaw/openclaw.json << JSONEOF
34
- {
35
- "models": {
36
- "providers": {
37
- "ollama": {
38
- "baseUrl": "http://127.0.0.1:11434/v1",
39
- "apiKey": "ollama-local",
40
- "api": "openai-responses",
41
- "models": [
42
- {
43
- "id": "qwen2.5-coder:7b-instruct",
44
- "name": "Qwen2.5-Coder 7B (local CPU)",
45
- "contextWindow": 8192,
46
- "maxTokens": 2048,
47
- "cost": {"input": 0, "output": 0, "cacheRead": 0, "cacheWrite": 0},
48
- "input": ["text"]
49
- }
50
- ]
51
- }
52
- }
53
- },
54
- "agents": {
55
- "defaults": {
56
- "model": {"primary": "ollama/qwen2.5-coder:7b-instruct"}
57
- }
58
- },
59
- "gateway": {
60
- "port": 8080,
61
- "password": "$GATEWAY_PASS"
62
- }
63
- }
64
- JSONEOF
65
- cp /app/SOUL.md ~/.openclaw/SOUL.md
66
-
67
- echo "[boot] Starting nginx..."
68
  nginx
69
 
70
- echo "[boot] Starting Ollama..."
71
  OLLAMA_HOST=127.0.0.1 ollama serve &
72
-
73
- echo "[boot] Waiting for Ollama..."
74
  for i in $(seq 1 60); do
75
- if curl -sf http://127.0.0.1:11434/api/tags > /dev/null 2>&1; then
76
- echo "[boot] Ollama ready after ${i}s"; break
77
- fi
78
  sleep 2
79
  done
80
-
81
- echo "[boot] Pulling qwen2.5-coder:7b-instruct..."
82
  ollama pull qwen2.5-coder:7b-instruct
83
 
84
- echo "[boot] Starting pipeline..."
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
85
  python3 /app/automation.py >> /app/pipeline.log 2>&1 &
86
  echo "[boot] Pipeline PID: $!"
87
 
88
- echo "[boot] Starting OpenClaw..."
89
  export OPENCLAW_API_PORT=8080
90
  export OLLAMA_HOST=http://127.0.0.1:11434
91
- openclaw gateway start || echo "[boot] OpenClaw gateway exited"
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
92
 
93
- echo "[boot] Sleeping to keep container alive..."
94
  sleep infinity
 
1
#!/bin/bash
# Boot sequence for the container:
#   1. Write Kaggle + YouTube credentials from env vars (no secrets on disk in the image).
#   2. Pull persisted state from the HF Dataset repo (best-effort).
#   3. Start nginx and Ollama, pull the local model.
#   4. Write the OpenClaw gateway config, start the pipeline and the gateway,
#      with a fallback HTTP server on 8080 so the port always has a listener.

echo "[boot] Kaggle creds..."
if [ -n "$KAGGLE_USERNAME" ] && [ -n "$KAGGLE_KEY" ]; then
  mkdir -p ~/.kaggle
  printf '{"username":"%s","key":"%s"}' "$KAGGLE_USERNAME" "$KAGGLE_KEY" > ~/.kaggle/kaggle.json
  chmod 600 ~/.kaggle/kaggle.json   # kaggle CLI refuses world-readable credentials
fi

echo "[boot] YouTube secrets..."
if [ -n "$YOUTUBE_CLIENT_ID" ] && [ -n "$YOUTUBE_CLIENT_SECRET" ]; then
  # Build the JSON in Python (read via os.environ) so the shell never interpolates secrets.
  python3 -c "
import json,os
d={'installed':{'client_id':os.environ['YOUTUBE_CLIENT_ID'],'project_id':'yt-ai-bot','auth_uri':'https://accounts.google.com/o/oauth2/auth','token_uri':'https://oauth2.googleapis.com/token','auth_provider_x509_cert_url':'https://www.googleapis.com/oauth2/v1/certs','client_secret':os.environ['YOUTUBE_CLIENT_SECRET'],'redirect_uris':['http://localhost']}}
open('/app/client_secrets.json','w').write(json.dumps(d))" \
    && echo "[boot] client_secrets.json written"
  # FIX: success message is now gated on the python exit status instead of
  # printing unconditionally.
fi

echo "[boot] HF Dataset pull..."
python3 -c "
import sys; sys.path.insert(0,'/app')
try:
    from sync_hub import pull_state  # NOTE(review): this line was elided diff context - confirm against repo
    from pathlib import Path
    pull_state(Path('/app'))
except Exception as e:
    print('[boot] pull skipped:', e)
"

echo "[boot] Nginx..."
nginx

echo "[boot] Ollama..."
OLLAMA_HOST=127.0.0.1 ollama serve &
# Poll for up to ~120s until the Ollama API answers.
for i in $(seq 1 60); do
  curl -sf http://127.0.0.1:11434/api/tags > /dev/null 2>&1 && echo "[boot] Ollama ready after ${i}s" && break
  sleep 2
done
ollama pull qwen2.5-coder:7b-instruct

echo "[boot] OpenClaw config..."
mkdir -p ~/.openclaw
# FIX: the old GPASS variable was assigned but never read (the python snippet
# re-derived the default on its own). Export the resolved value once so the
# config writer and anything else reading OPENCLAW_PASSWORD agree.
export OPENCLAW_PASSWORD="${OPENCLAW_PASSWORD:-arshit2025}"
# Config is written via python/json.dump (no heredoc) so quoting stays sane.
python3 -c "
import json, os
cfg = {
    'models': {
        'providers': {
            'ollama': {
                'baseUrl': 'http://127.0.0.1:11434/v1',
                'apiKey': 'ollama-local',
                'api': 'openai-responses',
                'models': [{
                    'id': 'qwen2.5-coder:7b-instruct',
                    'name': 'Qwen2.5-Coder 7B',
                    'contextWindow': 8192,
                    'maxTokens': 2048,
                    'cost': {'input':0,'output':0,'cacheRead':0,'cacheWrite':0},
                    'input': ['text']
                }]
            }
        }
    },
    'agents': {'defaults': {'model': {'primary': 'ollama/qwen2.5-coder:7b-instruct'}}},
    'gateway': {'port': 8080, 'password': os.environ['OPENCLAW_PASSWORD']}
}
json.dump(cfg, open(os.path.expanduser('~/.openclaw/openclaw.json'),'w'), indent=2)
print('[boot] openclaw.json written')  # FIX: never echo the gateway password into logs
"
cp /app/SOUL.md ~/.openclaw/SOUL.md

echo "[boot] Pipeline..."
python3 /app/automation.py >> /app/pipeline.log 2>&1 &
echo "[boot] Pipeline PID: $!"

echo "[boot] Starting OpenClaw gateway..."
export OPENCLAW_API_PORT=8080
export OLLAMA_HOST=http://127.0.0.1:11434
openclaw gateway start &
OC_PID=$!
sleep 5

# If the gateway died within the grace period, keep 8080 answering so the
# platform health check / UI still gets a response.
if ! kill -0 $OC_PID 2>/dev/null; then
  echo "[boot] OpenClaw died, starting fallback HTTP server on 8080..."
  python3 -c "
from http.server import HTTPServer, BaseHTTPRequestHandler
class H(BaseHTTPRequestHandler):
    def do_GET(self):
        msg = b'<h1>Pipeline Running</h1><p>OpenClaw UI starting...</p>'
        self.send_response(200); self.send_header('Content-Type','text/html')
        self.end_headers(); self.wfile.write(msg)
    def log_message(self, *a): pass
HTTPServer(('0.0.0.0', 8080), H).serve_forever()
" &
fi

echo "[boot] Done. Sleeping..."
sleep infinity
sync_hub.py CHANGED
@@ -5,41 +5,32 @@ HF_TOKEN = os.environ.get("HF_TOKEN", "")
5
  DATASET_REPO = os.environ.get("OPENCLAW_DATASET_REPO", "")
6
  PERSISTENT_FILES = {"topic_history.json", "fact_history.json", "token.pickle"}
7
 
8
- _api = None
9
- def _get_api():
10
- global _api
11
- if _api is None:
12
- from huggingface_hub import HfApi
13
- _api = HfApi(token=HF_TOKEN)
14
- return _api
15
-
16
- def pull_state(base_dir: Path):
17
  if not DATASET_REPO or not HF_TOKEN:
18
- print("OPENCLAW_DATASET_REPO or HF_TOKEN not set, skipping pull"); return
19
  from huggingface_hub import hf_hub_download
20
- from huggingface_hub.utils import EntryNotFoundError, RepositoryNotFoundError
21
- print(f"Pulling state from {DATASET_REPO}...")
22
  for fname in PERSISTENT_FILES:
23
  try:
24
- hf_hub_download(repo_id=DATASET_REPO, filename=fname, repo_type="dataset",
25
- token=HF_TOKEN, local_dir=str(base_dir), local_dir_use_symlinks=False)
 
26
  print(f" pulled {fname}")
27
- except (EntryNotFoundError, RepositoryNotFoundError): pass
28
- except Exception as e: print(f" could not pull {fname}: {e}")
29
 
30
- def push_file(local_path: Path):
31
  if not DATASET_REPO or not HF_TOKEN: return
32
- if local_path.name not in PERSISTENT_FILES or not local_path.exists(): return
 
33
  try:
34
- _get_api().upload_file(
35
- path_or_fileobj=str(local_path), path_in_repo=local_path.name,
 
36
  repo_id=DATASET_REPO, repo_type="dataset", token=HF_TOKEN,
37
- commit_message=f"auto: {local_path.name}")
38
- print(f" synced {local_path.name} to Hub")
39
  except Exception as e:
40
- print(f" Hub sync failed for {local_path.name}: {e}")
41
 
42
- def push_all_state(base_dir: Path):
43
  for fname in PERSISTENT_FILES:
44
- p = base_dir / fname
45
- if p.exists(): push_file(p)
 
5
  DATASET_REPO = os.environ.get("OPENCLAW_DATASET_REPO", "")
6
  PERSISTENT_FILES = {"topic_history.json", "fact_history.json", "token.pickle"}
7
 
8
def pull_state(base_dir):
    """Best-effort download of each persistent state file into *base_dir*.

    Requires the module-level DATASET_REPO / HF_TOKEN env-derived globals;
    any per-file failure (missing file, missing repo, network) is logged
    and skipped rather than raised.
    """
    if not (DATASET_REPO and HF_TOKEN):
        print("Skipping hub pull: missing env vars"); return
    # Imported lazily so the module loads even without huggingface_hub installed.
    from huggingface_hub import hf_hub_download
    common = dict(
        repo_id=DATASET_REPO,
        repo_type="dataset",
        token=HF_TOKEN,
        local_dir=str(base_dir),
        local_dir_use_symlinks=False,  # NOTE(review): deprecated no-op in recent huggingface_hub - confirm pinned version
    )
    for name in PERSISTENT_FILES:
        try:
            hf_hub_download(filename=name, **common)
        except Exception as err:
            print(f" skipped {name}: {err}")
        else:
            print(f" pulled {name}")
20
 
21
+ def push_file(local_path):
22
  if not DATASET_REPO or not HF_TOKEN: return
23
+ p = Path(local_path)
24
+ if p.name not in PERSISTENT_FILES or not p.exists(): return
25
  try:
26
+ from huggingface_hub import HfApi
27
+ HfApi(token=HF_TOKEN).upload_file(
28
+ path_or_fileobj=str(p), path_in_repo=p.name,
29
  repo_id=DATASET_REPO, repo_type="dataset", token=HF_TOKEN,
30
+ commit_message=f"auto: {p.name}")
 
31
  except Exception as e:
32
+ print(f" push failed {p.name}: {e}")
33
 
34
def push_all_state(base_dir):
    """Push every tracked persistent state file under *base_dir* to the Hub."""
    root = Path(base_dir)
    for name in PERSISTENT_FILES:
        # push_file itself skips names that do not exist on disk.
        push_file(root / name)