#!/bin/sh
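
# Persist /app/electerm-web/data to a HuggingFace dataset repo: restore the
# newest backup at startup, snapshot the data on a timer in the background,
# then launch the electerm-web server.
#
# Expected environment (names taken from this script; the example repo id is
# illustrative only):
#   HF_TOKEN       HuggingFace token with write access to the dataset repo
#   DATASET_ID     dataset repo id, e.g. some-user/electerm-backups
#   DATASET_N      number of backups to keep (optional, default 5)
#   SYNC_INTERVAL  seconds between backups (optional, default 7200)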
if [ -z "$HF_TOKEN" ] || [ -z "$DATASET_ID" ]; then |
|
|
echo "Starting without backup functionality - missing HF_TOKEN or DATASET_ID" |
|
|
exec node ./src/app/app.js |
|
|
exit 0 |
|
|
fi |

# Activate the Python virtualenv that provides huggingface_hub.
. /opt/venv/bin/activate
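
# Prune old backups in the dataset repo, keeping only the newest DATASET_N
# (default 5). Backup names embed a timestamp, so sorting them
# lexicographically also sorts them chronologically.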
manage_backups() {
  python3 -c "
from huggingface_hub import HfApi

api = HfApi(token='$HF_TOKEN')
try:
    files = api.list_repo_files(repo_id='$DATASET_ID', repo_type='dataset')
    backup_files = sorted([f for f in files if f.startswith('electerm_backup_') and f.endswith('.tar.gz')])
    backup_count = int('$DATASET_N') if '$DATASET_N'.isdigit() else 5
    if len(backup_files) > backup_count:
        for file in backup_files[:-backup_count]:
            api.delete_file(path_in_repo=file, repo_id='$DATASET_ID', repo_type='dataset')
            print(f'Deleted old backup: {file}')
except Exception as e:
    print(f'Error managing backups: {str(e)}')
"
}
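
# Upload one archive to the dataset repo, then prune old backups. Note that
# the shell interpolates $file_path and $file_name into the Python source, so
# they must not contain single quotes.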
upload_backup() {
  file_path="$1"
  file_name="$2"

  python3 -c "
from huggingface_hub import HfApi

api = HfApi(token='$HF_TOKEN')
try:
    api.upload_file(
        path_or_fileobj='$file_path',
        path_in_repo='$file_name',
        repo_id='$DATASET_ID',
        repo_type='dataset'
    )
    print('Successfully uploaded $file_name')
except Exception as e:
    print(f'Error uploading file: {str(e)}')
"

  manage_backups
}
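
# Fetch the newest backup archive, if any, and unpack it over the data
# directory. sys.exit() raises SystemExit, which 'except Exception' does not
# catch, so an empty repo is reported but not treated as an error.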
download_latest_backup() {
  python3 -c "
from huggingface_hub import HfApi
import tarfile, tempfile, os, sys

api = HfApi(token='$HF_TOKEN')
try:
    files = api.list_repo_files(repo_id='$DATASET_ID', repo_type='dataset')
    backup_files = [f for f in files if f.startswith('electerm_backup_') and f.endswith('.tar.gz')]
    if not backup_files:
        print('No backup files found')
        sys.exit()
    latest_backup = sorted(backup_files)[-1]
    with tempfile.TemporaryDirectory() as temp_dir:
        filepath = api.hf_hub_download(
            repo_id='$DATASET_ID', filename=latest_backup,
            repo_type='dataset', local_dir=temp_dir
        )
        if os.path.exists(filepath):
            with tarfile.open(filepath, 'r:gz') as tar:
                tar.extractall('/app/electerm-web/data')
            print(f'Successfully restored backup from {latest_backup}')
except Exception as e:
    print(f'Error downloading backup: {str(e)}')
"
}
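
# One-shot restore before the app starts, so existing data survives restarts.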
echo "Downloading latest backup from HuggingFace..." |
|
|
download_latest_backup |
|
|
|
|
|
|
|
|
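
# Periodically archive the data directory and push it to the dataset repo.
# SYNC_INTERVAL is in seconds; the default of 7200 is two hours.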
sync_data() {
  while true; do
    echo "Starting sync process at $(date)"
    if [ -d /app/electerm-web/data ]; then
      backup_file="electerm_backup_$(date +%Y%m%d_%H%M%S).tar.gz"
      tar -czf "/tmp/${backup_file}" -C /app/electerm-web/data .
      echo "Uploading backup to HuggingFace..."
      upload_backup "/tmp/${backup_file}" "${backup_file}"
      rm -f "/tmp/${backup_file}"
    else
      echo "Data directory does not exist yet, waiting for next sync..."
    fi
    sleep "${SYNC_INTERVAL:-7200}"
  done
}
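
# Run the backup loop in the background, then exec the app so node replaces
# this shell and receives signals directly.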
sync_data & |
|
|
|
|
|
|
|
|
exec node ./src/app/app.js |