Spaces:
Running
Running
Prabhas Jupalli committed on
Commit ·
8f2ee4f
1
Parent(s): 225c950
Fix: Resolved HF runtime errors (casing, permissions, and module imports).
Browse files- Dockerfile +4 -0
- backend/database.py +25 -9
- backend/navigator.py +1 -1
- backend/nlp_api.py +9 -9
Dockerfile
CHANGED
|
@@ -19,6 +19,10 @@ RUN apt-get update && apt-get install -y --no-install-recommends gcc python3-dev
|
|
| 19 |
|
| 20 |
USER user
|
| 21 |
ENV PATH="/home/user/.local/bin:$PATH"
|
|
|
|
|
|
|
|
|
|
|
|
|
| 22 |
WORKDIR /home/user/app
|
| 23 |
|
| 24 |
# Install PyTorch CPU-only FIRST (largest package, separate cache layer)
|
|
|
|
| 19 |
|
| 20 |
USER user
|
| 21 |
ENV PATH="/home/user/.local/bin:$PATH"
|
| 22 |
+
USER root
|
| 23 |
+
RUN mkdir -p /data/db && chown -R user:user /data
|
| 24 |
+
USER user
|
| 25 |
+
|
| 26 |
WORKDIR /home/user/app
|
| 27 |
|
| 28 |
# Install PyTorch CPU-only FIRST (largest package, separate cache layer)
|
backend/database.py
CHANGED
|
@@ -3,15 +3,31 @@ import os
|
|
| 3 |
from datetime import datetime
|
| 4 |
|
| 5 |
# HF Native Persistence: Check if /data volume is mounted
|
| 6 |
-
|
| 7 |
-
|
| 8 |
-
|
| 9 |
-
|
| 10 |
-
|
| 11 |
-
|
| 12 |
-
|
| 13 |
-
|
| 14 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 15 |
os.makedirs(os.path.dirname(DB_FILE), exist_ok=True)
|
| 16 |
|
| 17 |
def init_db():
|
|
|
|
| 3 |
from datetime import datetime
|
| 4 |
|
| 5 |
# HF Native Persistence: Check if /data volume is mounted
|
| 6 |
+
def get_db_file_path():
|
| 7 |
+
# Primary choice: HF Persistent Storage Mount
|
| 8 |
+
if os.path.exists('/data'):
|
| 9 |
+
# Use a subdirectory to avoid permission issues at the root mount point
|
| 10 |
+
test_path = '/data/db/db.json'
|
| 11 |
+
try:
|
| 12 |
+
os.makedirs(os.path.dirname(test_path), exist_ok=True)
|
| 13 |
+
# Verify write access
|
| 14 |
+
with open(os.path.join(os.path.dirname(test_path), '.write_test'), 'w') as f:
|
| 15 |
+
f.write('test')
|
| 16 |
+
os.remove(os.path.join(os.path.dirname(test_path), '.write_test'))
|
| 17 |
+
print(f"💡 HF Native Persistence: Verified. Storing data in {test_path}")
|
| 18 |
+
return test_path
|
| 19 |
+
except Exception as e:
|
| 20 |
+
print(f"⚠️ HF Native Persistence: (/data) exists but is not writable: {e}")
|
| 21 |
+
|
| 22 |
+
# Fallback: Local Storage
|
| 23 |
+
local_path = os.path.join(os.path.dirname(__file__), 'data', 'db.json')
|
| 24 |
+
os.makedirs(os.path.dirname(local_path), exist_ok=True)
|
| 25 |
+
print(f"💻 Local Persistence: Active. Storing data in {local_path}")
|
| 26 |
+
return local_path
|
| 27 |
+
|
| 28 |
+
DB_FILE = get_db_file_path()
|
| 29 |
+
|
| 30 |
+
# Ensure final data directory exists
|
| 31 |
os.makedirs(os.path.dirname(DB_FILE), exist_ok=True)
|
| 32 |
|
| 33 |
def init_db():
|
backend/navigator.py
CHANGED
|
@@ -23,7 +23,7 @@ import numpy as np
|
|
| 23 |
# ──────────────────────────────────────────────
|
| 24 |
# Model Definition — must match training architecture
|
| 25 |
# ──────────────────────────────────────────────
|
| 26 |
-
_MODEL_PATH = os.path.join(os.path.dirname(__file__), '..', '
|
| 27 |
|
| 28 |
_dqn_net = None
|
| 29 |
_dqn_mode = "unavailable"
|
|
|
|
| 23 |
# ──────────────────────────────────────────────
|
| 24 |
# Model Definition — must match training architecture
|
| 25 |
# ──────────────────────────────────────────────
|
| 26 |
+
_MODEL_PATH = os.path.join(os.path.dirname(__file__), '..', 'navigators', 'dqn_model.pth')
|
| 27 |
|
| 28 |
_dqn_net = None
|
| 29 |
_dqn_mode = "unavailable"
|
backend/nlp_api.py
CHANGED
|
@@ -255,7 +255,7 @@ def reset_database():
|
|
| 255 |
def get_resources():
|
| 256 |
"""Get all NLP learning resources with their grid positions and correct visited state"""
|
| 257 |
session_id = request.args.get('session_id', 'default')
|
| 258 |
-
from database import get_session
|
| 259 |
session = get_session(session_id)
|
| 260 |
visited_ids = set(str(v).strip() for v in session.get('visitedResources', []))
|
| 261 |
|
|
@@ -311,7 +311,7 @@ def move_agent():
|
|
| 311 |
def get_notifications():
|
| 312 |
"""Get all notifications for a session"""
|
| 313 |
session_id = request.args.get('session_id', 'default')
|
| 314 |
-
from database import get_session
|
| 315 |
session = get_session(session_id)
|
| 316 |
return jsonify(session.get('notifications', []))
|
| 317 |
|
|
@@ -327,7 +327,7 @@ def add_notification():
|
|
| 327 |
if not message:
|
| 328 |
return jsonify({'error': 'Message required'}), 400
|
| 329 |
|
| 330 |
-
from database import get_session, update_session
|
| 331 |
session = get_session(session_id)
|
| 332 |
if 'notifications' not in session:
|
| 333 |
session['notifications'] = []
|
|
@@ -351,7 +351,7 @@ def mark_notifications_read():
|
|
| 351 |
data = request.get_json()
|
| 352 |
session_id = data.get('session_id', 'default')
|
| 353 |
|
| 354 |
-
from database import get_session, update_session
|
| 355 |
session = get_session(session_id)
|
| 356 |
if 'notifications' in session:
|
| 357 |
for n in session['notifications']:
|
|
@@ -863,7 +863,7 @@ def get_learning_data():
|
|
| 863 |
ai_analysis = ""
|
| 864 |
xp_earned = 0
|
| 865 |
try:
|
| 866 |
-
from database import load_db
|
| 867 |
db = load_db()
|
| 868 |
# Find latest summary for this session (they contain session_id in their ID or we match title)
|
| 869 |
matching_summaries = [s for s in db.get('summaries', []) if f"summary_{session_id}" in s.get('id', '')]
|
|
@@ -883,7 +883,7 @@ def get_learning_data():
|
|
| 883 |
activity_heatmap = {}
|
| 884 |
activity_log = []
|
| 885 |
try:
|
| 886 |
-
from database import load_db
|
| 887 |
db = load_db()
|
| 888 |
all_summaries = db.get('summaries', [])
|
| 889 |
|
|
@@ -961,7 +961,7 @@ def get_learning_data():
|
|
| 961 |
def get_bookmarks():
|
| 962 |
"""Get all bookmarked resources for a session"""
|
| 963 |
session_id = request.args.get('session_id', 'default')
|
| 964 |
-
from database import get_bookmarks as get_db_bookmarks
|
| 965 |
return jsonify(get_db_bookmarks(session_id))
|
| 966 |
|
| 967 |
|
|
@@ -975,7 +975,7 @@ def add_bookmark():
|
|
| 975 |
if not resource_id:
|
| 976 |
return jsonify({'error': 'Resource ID required'}), 400
|
| 977 |
|
| 978 |
-
from database import add_bookmark as add_db_bookmark
|
| 979 |
add_db_bookmark(session_id, resource_id)
|
| 980 |
return jsonify({'status': 'success', 'resource_id': resource_id})
|
| 981 |
|
|
@@ -990,7 +990,7 @@ def remove_bookmark():
|
|
| 990 |
if not resource_id:
|
| 991 |
return jsonify({'error': 'Resource ID required'}), 400
|
| 992 |
|
| 993 |
-
from database import remove_bookmark as remove_db_bookmark
|
| 994 |
remove_db_bookmark(session_id, resource_id)
|
| 995 |
return jsonify({'status': 'success', 'resource_id': resource_id})
|
| 996 |
|
|
|
|
| 255 |
def get_resources():
|
| 256 |
"""Get all NLP learning resources with their grid positions and correct visited state"""
|
| 257 |
session_id = request.args.get('session_id', 'default')
|
| 258 |
+
from .database import get_session
|
| 259 |
session = get_session(session_id)
|
| 260 |
visited_ids = set(str(v).strip() for v in session.get('visitedResources', []))
|
| 261 |
|
|
|
|
| 311 |
def get_notifications():
|
| 312 |
"""Get all notifications for a session"""
|
| 313 |
session_id = request.args.get('session_id', 'default')
|
| 314 |
+
from .database import get_session
|
| 315 |
session = get_session(session_id)
|
| 316 |
return jsonify(session.get('notifications', []))
|
| 317 |
|
|
|
|
| 327 |
if not message:
|
| 328 |
return jsonify({'error': 'Message required'}), 400
|
| 329 |
|
| 330 |
+
from .database import get_session, update_session
|
| 331 |
session = get_session(session_id)
|
| 332 |
if 'notifications' not in session:
|
| 333 |
session['notifications'] = []
|
|
|
|
| 351 |
data = request.get_json()
|
| 352 |
session_id = data.get('session_id', 'default')
|
| 353 |
|
| 354 |
+
from .database import get_session, update_session
|
| 355 |
session = get_session(session_id)
|
| 356 |
if 'notifications' in session:
|
| 357 |
for n in session['notifications']:
|
|
|
|
| 863 |
ai_analysis = ""
|
| 864 |
xp_earned = 0
|
| 865 |
try:
|
| 866 |
+
from .database import load_db
|
| 867 |
db = load_db()
|
| 868 |
# Find latest summary for this session (they contain session_id in their ID or we match title)
|
| 869 |
matching_summaries = [s for s in db.get('summaries', []) if f"summary_{session_id}" in s.get('id', '')]
|
|
|
|
| 883 |
activity_heatmap = {}
|
| 884 |
activity_log = []
|
| 885 |
try:
|
| 886 |
+
from .database import load_db
|
| 887 |
db = load_db()
|
| 888 |
all_summaries = db.get('summaries', [])
|
| 889 |
|
|
|
|
| 961 |
def get_bookmarks():
|
| 962 |
"""Get all bookmarked resources for a session"""
|
| 963 |
session_id = request.args.get('session_id', 'default')
|
| 964 |
+
from .database import get_bookmarks as get_db_bookmarks
|
| 965 |
return jsonify(get_db_bookmarks(session_id))
|
| 966 |
|
| 967 |
|
|
|
|
| 975 |
if not resource_id:
|
| 976 |
return jsonify({'error': 'Resource ID required'}), 400
|
| 977 |
|
| 978 |
+
from .database import add_bookmark as add_db_bookmark
|
| 979 |
add_db_bookmark(session_id, resource_id)
|
| 980 |
return jsonify({'status': 'success', 'resource_id': resource_id})
|
| 981 |
|
|
|
|
| 990 |
if not resource_id:
|
| 991 |
return jsonify({'error': 'Resource ID required'}), 400
|
| 992 |
|
| 993 |
+
from .database import remove_bookmark as remove_db_bookmark
|
| 994 |
remove_db_bookmark(session_id, resource_id)
|
| 995 |
return jsonify({'status': 'success', 'resource_id': resource_id})
|
| 996 |
|