Spaces:
Sleeping
Sleeping
Orbit Automations committed on
Commit Β·
4a693cf
0
Parent(s):
Auto-sync from Julius-606/Orbit monorepo
Browse files- Dockerfile +45 -0
- README.md +13 -0
- __pycache__/main.cpython-312.pyc +0 -0
- alembic.ini +53 -0
- alembic/__pycache__/env.cpython-312.pyc +0 -0
- alembic/env.py +48 -0
- alembic/script.py.mako +22 -0
- alembic/versions/0252b66d5ffd_initial_orbit_schema.py +41 -0
- alembic/versions/7a8b9c0d1e2f_add_remarks_to_study_tasks.py +24 -0
- alembic/versions/__pycache__/0252b66d5ffd_initial_orbit_schema.cpython-312.pyc +0 -0
- app/__pycache__/main.cpython-312.pyc +0 -0
- app/api/deps.py +28 -0
- app/api/v1/api.py +27 -0
- app/api/v1/auth.py +26 -0
- app/api/v1/forex.py +55 -0
- app/api/v1/med_scholar.py +44 -0
- app/api/v1/tasks.py +154 -0
- app/core/__pycache__/config.cpython-312.pyc +0 -0
- app/core/config.py +63 -0
- app/core/notifications.py +0 -0
- app/core/security.py +0 -0
- app/db/__pycache__/init_db.cpython-312.pyc +0 -0
- app/db/__pycache__/session.cpython-312.pyc +0 -0
- app/db/init_db.py +79 -0
- app/db/redis_cache.py +0 -0
- app/db/session.py +46 -0
- app/main.py +138 -0
- app/models/__pycache__/study.cpython-312.pyc +0 -0
- app/models/study.py +42 -0
- app/routers/med_scholar.py +54 -0
- app/routers/orbit_ai.py +93 -0
- app/services/blast.py +55 -0
- app/services/cate.py +45 -0
- app/services/governor.py +43 -0
- app/services/llm.py +44 -0
- app/services/med_scan.py +43 -0
- app/services/memory.py +55 -0
- app/services/mt5_bridge.py +53 -0
- app/services/orbit_brain.py +173 -0
- app/services/psych_check.py +45 -0
- app/services/telegram_mod.py +48 -0
- app/worker/heartbeat.py +44 -0
- fix_db.py +39 -0
- inject_task.py +46 -0
- render.yaml +44 -0
- requirements.txt +30 -0
- test_neon_db.py +90 -0
Dockerfile
ADDED
|
@@ -0,0 +1,45 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
################################################################################
# FILE: backend/Dockerfile
# VERSION: 1.0.2 | SYSTEM: Hugging Face Spaces Optimization
################################################################################
#
# Changes:
# - Hugging Face Spaces expects the app to bind to port 7860 by default.
# - Switched the exposed port and CMD to 7860 for a zero-drawdown deployment.

FROM python:3.11-slim

WORKDIR /code

# Keep the image lean: no .pyc files, and log output flushed immediately.
ENV PYTHONDONTWRITEBYTECODE=1
ENV PYTHONUNBUFFERED=1

# Install system dependencies
RUN apt-get update && apt-get install -y \
    build-essential \
    libpq-dev \
    gcc \
    && rm -rf /var/lib/apt/lists/*

# Copy requirements and install
# NOTE(review): pip runs as root here, so packages land in the system
# site-packages, while PATH below adds ~/.local/bin — confirm console
# scripts (uvicorn) resolve as intended after the USER switch.
COPY requirements.txt /code/requirements.txt
RUN pip install --no-cache-dir -r /code/requirements.txt

# Set up a new user (Hugging Face strictly requires a non-root user with ID 1000)
RUN useradd -m -u 1000 orbituser
USER orbituser

# Set home to the user's home directory
ENV HOME=/home/orbituser \
    PATH=/home/orbituser/.local/bin:$PATH

WORKDIR $HOME/app

# Copy the rest of the code (chown ensures the new user owns the files)
COPY --chown=orbituser . $HOME/app

# Hugging Face default port
EXPOSE 7860

# Start command
CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "7860"]
|
README.md
ADDED
|
@@ -0,0 +1,13 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
---
|
| 2 |
+
title: Orbit Brain
|
| 3 |
+
emoji: πͺ
|
| 4 |
+
colorFrom: green
|
| 5 |
+
colorTo: blue
|
| 6 |
+
sdk: docker
|
| 7 |
+
app_port: 7860
|
| 8 |
+
---
|
| 9 |
+
|
| 10 |
+
# Project Orbit: The Brain π§
|
| 11 |
+
|
| 12 |
+
This is the Life-OS backend powering Med-Scholar and Forex Guardian.
|
| 13 |
+
Deployed securely on Hugging Face Spaces.
|
__pycache__/main.cpython-312.pyc
ADDED
|
Binary file (3.62 kB). View file
|
|
|
alembic.ini
ADDED
|
@@ -0,0 +1,53 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# ==========================================
# IDENTITY: The Database Time Machine / Alembic Config
# FILEPATH: backend/alembic.ini
# COMPONENT: DB Migrations
# ROLE: Tells Alembic where your Postgres database lives so it can upgrade/downgrade tables.
# VIBE: Ctrl+Z for your database schemas. Absolute lifesaver. βͺ
# ==========================================

[alembic]
# Path to migration scripts
script_location = alembic
prepend_sys_path = .
version_path_separator = os

# We will override this in env.py with our actual settings,
# but Alembic complains if it's completely empty here.
# NOTE(review): keep this a placeholder only — env.py substitutes
# settings.async_database_url at runtime; never commit real credentials here.
sqlalchemy.url = postgresql+asyncpg://orbit_user:super_secret_password@localhost/orbit_db

[post_write_hooks]

# Standard Python logging config consumed by fileConfig() in env.py.
[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARN
handlers = console
qualname =

[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
|
alembic/__pycache__/env.cpython-312.pyc
ADDED
|
Binary file (2.52 kB). View file
|
|
|
alembic/env.py
ADDED
|
@@ -0,0 +1,48 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
################################################################################
# FILE: backend/alembic/env.py
# VERSION: 1.0.1 | SYSTEM: Neon DB Auto-Correct
################################################################################

import asyncio
from logging.config import fileConfig
from sqlalchemy import pool
from sqlalchemy.engine import Connection
from sqlalchemy.ext.asyncio import async_engine_from_config
from alembic import context

# Import your settings and your Base models so Alembic can read them
from app.core.config import settings
from app.models.study import Base

config = context.config

# π THE FIX: Use the safely formatted async_database_url so migrations don't crash
config.set_main_option("sqlalchemy.url", settings.async_database_url)

if config.config_file_name is not None:
    fileConfig(config.config_file_name)

# Autogenerate compares the live DB against this declarative metadata.
target_metadata = Base.metadata

def do_run_migrations(connection: Connection) -> None:
    """Configure the Alembic context and run migrations on a sync connection."""
    # Executed via run_sync() below: Alembic's migration context itself is
    # not async-aware, so it must run on the sync facade of the connection.
    context.configure(connection=connection, target_metadata=target_metadata)
    with context.begin_transaction():
        context.run_migrations()

async def run_async_migrations() -> None:
    """In this house, we run async migrations so we don't block the event loop."""
    # NullPool: one-shot connection for the migration run, nothing lingers.
    connectable = async_engine_from_config(
        config.get_section(config.config_ini_section, {}),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    async with connectable.connect() as connection:
        await connection.run_sync(do_run_migrations)

    await connectable.dispose()

def run_migrations_online() -> None:
    """Entry point: drives the async migration run to completion."""
    asyncio.run(run_async_migrations())

# NOTE(review): executed at import time; there is no offline-mode branch
# (context.is_offline_mode()), so `alembic upgrade --sql` will not work
# with this env.py — confirm that is intended.
run_migrations_online()
|
alembic/script.py.mako
ADDED
|
@@ -0,0 +1,22 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
## Mako template rendered by `alembic revision` to create new migration files.
## Lines starting with `##` are Mako comments and are NOT emitted into the
## generated scripts; everything else is written out with ${...} substituted.
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}

def upgrade():
    ${upgrades if upgrades else "pass"}

def downgrade():
    ${downgrades if downgrades else "pass"}
|
alembic/versions/0252b66d5ffd_initial_orbit_schema.py
ADDED
|
@@ -0,0 +1,41 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""initial_orbit_schema

Revision ID: 0252b66d5ffd
Revises: 
Create Date: 2026-03-03 10:52:07.272398

"""
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = '0252b66d5ffd'
down_revision = None
branch_labels = None
depends_on = None

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # Creates the core study_tasks table. On PostgreSQL, sa.Enum also
    # creates the 'brainrotlevel' type as a side effect of create_table.
    op.create_table('study_tasks',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('title', sa.String(), nullable=False),
    sa.Column('subject', sa.String(), nullable=False),
    sa.Column('due_date', sa.DateTime(), nullable=True),
    sa.Column('completed', sa.Boolean(), nullable=True),
    sa.Column('brain_rot_level', sa.Enum('CHILL', 'MID', 'COOKED', name='brainrotlevel'), nullable=True),
    sa.Column('created_at', sa.DateTime(), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_study_tasks_id'), 'study_tasks', ['id'], unique=False)
    op.create_index(op.f('ix_study_tasks_subject'), 'study_tasks', ['subject'], unique=False)
    op.create_index(op.f('ix_study_tasks_title'), 'study_tasks', ['title'], unique=False)
    # ### end Alembic commands ###

def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # NOTE(review): drop_table does not drop the 'brainrotlevel' enum type
    # on PostgreSQL, so re-running upgrade after a downgrade may fail with
    # "type already exists" — confirm and drop the type here if needed.
    op.drop_index(op.f('ix_study_tasks_title'), table_name='study_tasks')
    op.drop_index(op.f('ix_study_tasks_subject'), table_name='study_tasks')
    op.drop_index(op.f('ix_study_tasks_id'), table_name='study_tasks')
    op.drop_table('study_tasks')
    # ### end Alembic commands ###
|
alembic/versions/7a8b9c0d1e2f_add_remarks_to_study_tasks.py
ADDED
|
@@ -0,0 +1,24 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""add remarks to study tasks

Revision ID: 7a8b9c0d1e2f
Revises: 0252b66d5ffd
Create Date: 2026-04-15 12:00:00.000000

"""
from alembic import op
import sqlalchemy as sa

# revision identifiers, used by Alembic.
revision = '7a8b9c0d1e2f'
down_revision = '0252b66d5ffd'
branch_labels = None
depends_on = None

def upgrade():
    # Add remarks and is_reminder columns to study_tasks
    op.add_column('study_tasks', sa.Column('remarks', sa.Text(), nullable=True))
    # server_default='false' backfills existing rows so the new boolean
    # column is never NULL for pre-existing tasks.
    op.add_column('study_tasks', sa.Column('is_reminder', sa.Boolean(), nullable=True, server_default='false'))

def downgrade():
    # Drop in reverse order of addition.
    op.drop_column('study_tasks', 'is_reminder')
    op.drop_column('study_tasks', 'remarks')
|
alembic/versions/__pycache__/0252b66d5ffd_initial_orbit_schema.cpython-312.pyc
ADDED
|
Binary file (2.67 kB). View file
|
|
|
app/__pycache__/main.cpython-312.pyc
ADDED
|
Binary file (6.25 kB). View file
|
|
|
app/api/deps.py
ADDED
|
@@ -0,0 +1,28 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
################################################################################
#SFILE: backend/app/api/deps.py
#VERSION: 1.0.2 | SYSTEM: Jarvis Protocol
################################################################################

import hmac
import logging

from fastapi import Depends, HTTPException, status
from fastapi.security import OAuth2PasswordBearer
# π FIX: Added 'app.' prefix
from app.core.config import settings

logger = logging.getLogger("API-Bouncer")

oauth2_scheme = OAuth2PasswordBearer(tokenUrl="token")

async def verify_device_access(token: str = Depends(oauth2_scheme)) -> bool:
    """
    Gate protected endpoints behind the shared device token.

    If someone finds your server IP, this stops them from randomly adding
    fake tasks or closing your MT5 trades.

    Raises:
        HTTPException: 401 when the bearer token does not match SECRET_KEY.
    """
    # BUGFIX: `!=` short-circuits on the first differing byte, leaking
    # timing information an attacker can use to recover the secret one
    # byte at a time. hmac.compare_digest compares in constant time.
    if not hmac.compare_digest(str(token), str(settings.SECRET_KEY)):
        logger.warning("Intruder alert! Someone tried to breach Orbit with a fake token.")
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Opps detected. Access denied.",
            headers={"WWW-Authenticate": "Bearer"},
        )
    return True
|
app/api/v1/api.py
ADDED
|
@@ -0,0 +1,27 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
################################################################################
# FILE: backend/app/api/v1/api.py
# VERSION: 1.2.0 | SYSTEM: Orbit Protocol
################################################################################

from fastapi import APIRouter
# We are importing the versioned routers to ensure data integrity
from app.api.v1 import forex, tasks

# THE FIX: We are finally inviting orbit_ai to the party so it stops giving 404s π
from app.routers import med_scholar, orbit_ai

# Aggregate router: app.main mounts this once to expose every v1 route.
api_router = APIRouter()

# Mount the Forex Guardian π
api_router.include_router(forex.router, prefix="/forex", tags=["Risk Management"])

# Mount the Med-Scholar (The Dean) π©Ί
# NOTE(review): mounted without a prefix — med_scholar defines its own
# /tasks/... paths; confirm these do not collide with the /tasks prefix below.
api_router.include_router(med_scholar.router, tags=["Syllabus Vault"])

# Mount the Life Tasks π
api_router.include_router(tasks.router, prefix="/tasks", tags=["Life Admin"])

# Mount the Brain! π§  (This is what you were missing)
api_router.include_router(orbit_ai.router, tags=["Orbit-AI"])

# Vibe Check: All routes are now synchronized. WAGMI. π
|
app/api/v1/auth.py
ADDED
|
@@ -0,0 +1,26 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
################################################################################
#FILE: backend/app/api/v1/auth.py
#VERSION: 1.0.2 | SYSTEM: Jarvis Protocol
################################################################################

import secrets

from fastapi import APIRouter, Depends, HTTPException, status
from fastapi.security import OAuth2PasswordRequestForm
# π FIX: Added 'app.' prefix
from app.core.config import settings

router = APIRouter()

@router.post("/token")
async def login_for_access_token(form_data: OAuth2PasswordRequestForm = Depends()):
    """
    Your laptop/phone sends a username and password here.
    If it matches the Vault, we give them a token.

    Security notes:
    - Comparisons are constant-time (secrets.compare_digest) so response
      timing cannot leak how many characters of a credential matched.
    - WARNING: credentials are hard-coded; move them into settings /
      environment variables before exposing this endpoint publicly.
    """
    # Evaluate both checks (no `or` short-circuit) to keep timing uniform
    # regardless of which credential is wrong.
    user_ok = secrets.compare_digest(form_data.username, "orbit_admin")
    pass_ok = secrets.compare_digest(form_data.password, "admin_password")
    if not (user_ok and pass_ok):
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Incorrect username or password. Are you an opp?",
            headers={"WWW-Authenticate": "Bearer"},
        )

    return {"access_token": settings.SECRET_KEY, "token_type": "bearer"}
|
app/api/v1/forex.py
ADDED
|
@@ -0,0 +1,55 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# ==========================================
# IDENTITY: The Risk Manager / Forex Guardian
# FILEPATH: backend/app/api/v1/forex.py
# COMPONENT: Backend API Routes
# ROLE: Bridges MT5 to Orbit. Watches for SL/TP, checks leverage.
# VIBE: The friend who takes your phone away when you try to revenge trade. πβ
# ==========================================

import logging

from fastapi import APIRouter, HTTPException
from pydantic import BaseModel

router = APIRouter()
logger = logging.getLogger("Forex-Guardian")

# Payload pushed to us by the local MT5 bridge.
class TradeAlert(BaseModel):
    pair: str
    action: str  # e.g., "TP_HIT", "SL_HIT", "OVER_LEVERAGED"
    pnl: float
    message: str

@router.post("/alert")
async def trigger_trade_alert(alert: TradeAlert):
    """
    Endpoint hit by the local MT5 bridge when something spicy happens in the market.
    """
    logger.info(f"Forex Alert: {alert.pair} - {alert.action}. PNL: {alert.pnl}")

    # Stop-outs and over-leverage escalate; everything else is routine.
    priority = "HIGH" if alert.action in ["SL_HIT", "OVER_LEVERAGED"] else "NORMAL"

    # TODO: Inject the WebSockets manager here to blast the notification
    # to the Android app and Laptop UI simultaneously.

    # Guard clause: routine alerts are acknowledged and passed through.
    if alert.action != "OVER_LEVERAGED":
        return {"status": "Alert Processed", "data": alert}

    # Guardian psych-check: this payload is spoken aloud by the phone/laptop TTS.
    warning_msg = f"Yo, we're over-leveraged on {alert.pair}. Orbit is advising you to close a position. Don't be a hero!"
    return {"status": "Alert Broadcasted", "tts_payload": warning_msg, "priority": priority}

@router.get("/risk-audit")
async def run_risk_audit():
    """
    Checks total exposure. Keeps you from revenge trading when Anatomy gets too stressful.
    """
    exposure_safe = True  # Placeholder for MT5 Python API logic

    if exposure_safe:
        return {"status": "Risk levels acceptable. Let him cook."}

    raise HTTPException(status_code=400, detail="Risk limit exceeded. Step away from the charts.")
|
app/api/v1/med_scholar.py
ADDED
|
@@ -0,0 +1,44 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
################################################################################
#FILE: backend/app/api/v1/med_scholar.py
#VERSION: 1.0.1 | SYSTEM: Jarvis Protocol
################################################################################

from fastapi import APIRouter, Depends, HTTPException
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.future import select
# π FIX: Added 'app.' prefixes
from app.db.session import get_db
from app.models.study import StudyTask
from pydantic import BaseModel
from typing import List

router = APIRouter()

# Request payload for POST /tasks/.
class TaskCreate(BaseModel):
    title: str
    subject: str
    # NOTE(review): passed straight to StudyTask as a raw lowercase string;
    # app/api/v1/tasks.py maps the same field through a rot_map to the
    # BrainRotLevel enum first — confirm the column accepts "mid" as-is.
    brain_rot_level: str = "mid"

@router.post("/tasks/", response_model=dict)
async def create_study_task(task: TaskCreate, db: AsyncSession = Depends(get_db)):
    """Drop a new task into the syllabus vault."""
    new_task = StudyTask(
        title=task.title,
        subject=task.subject,
        brain_rot_level=task.brain_rot_level
    )
    db.add(new_task)
    await db.commit()
    # Refresh so the DB-generated primary key is populated for the response.
    await db.refresh(new_task)
    return {"status": "W", "message": "Task added. Time to lock in.", "task_id": new_task.id}

@router.get("/tasks/pending", response_model=List[dict])
async def get_pending_tasks(db: AsyncSession = Depends(get_db)):
    """Shows you exactly how behind you are in Med School."""
    result = await db.execute(select(StudyTask).where(StudyTask.completed == False))
    tasks = result.scalars().all()

    if not tasks:
        # One-element list keeps the response valid under response_model=List[dict].
        return [{"message": "Zero pending tasks. You're either a genius or you forgot to write them down."}]

    return [{"id": t.id, "title": t.title, "subject": t.subject} for t in tasks]
|
app/api/v1/tasks.py
ADDED
|
@@ -0,0 +1,154 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
################################################################################
# FILE: backend/app/api/v1/tasks.py
# VERSION: 4.0.0 | SYSTEM: Orbit Protocol
# IDENTITY: Task Management / Life Admin Router - V4.0.0 Upgrades
################################################################################

from fastapi import APIRouter, Depends, HTTPException, status
from pydantic import BaseModel
from typing import List, Optional
import logging
from datetime import datetime

from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.future import select
from app.db.session import get_db
from app.models.study import StudyTask, BrainRotLevel

from app.services.governor import governor

logger = logging.getLogger("orbit_tasks")
router = APIRouter()

# Request payload for POST /: new task injection.
class TaskCreatePayload(BaseModel):
    title: str
    subject: str = "Life Admin"
    # Free-form string; create_task maps it onto the BrainRotLevel enum.
    brain_rot_level: str = "mid"
    is_reminder: bool = False
    due_date: Optional[datetime] = None

# Request payload for POST /{task_id}/complete: optional closing remarks.
class TaskCompletePayload(BaseModel):
    remarks: Optional[str] = None
|
| 32 |
+
|
| 33 |
+
@router.get("/", status_code=status.HTTP_200_OK)
async def get_all_tasks(db: AsyncSession = Depends(get_db)):
    """Fetch all active tasks with full metadata."""
    try:
        # hasattr guard: tolerate governor builds that predate this method.
        current_vibe = governor.get_current_recommendation() if hasattr(governor, 'get_current_recommendation') else "Grind Mode Activated"

        result = await db.execute(select(StudyTask))
        tasks = result.scalars().all()

        task_list = [
            {
                "id": t.id,
                "title": t.title,
                "subject": t.subject,
                # Enum members serialize via .value; plain strings pass through.
                "brain_rot_level": t.brain_rot_level.value if hasattr(t.brain_rot_level, 'value') else t.brain_rot_level,
                "due_date": t.due_date.isoformat() if t.due_date else None,
                "is_reminder": t.is_reminder,
                "completed": t.completed,
                "remarks": t.remarks
            } for t in tasks
        ]

        return {
            "status": "bullish π",
            "message": "Tasks loaded successfully.",
            "governor_status": current_vibe,
            "data": task_list
        }
    except Exception as e:
        logger.error(f"Task fetch liquidated: {str(e)}")
        raise HTTPException(status_code=500, detail="Failed to fetch tasks.")
|
| 64 |
+
|
| 65 |
+
@router.get("/pending", status_code=status.HTTP_200_OK)
async def get_pending_tasks(db: AsyncSession = Depends(get_db)):
    """Fetch only the pending tasks for the Android sorting logic."""
    try:
        logger.info("Mobile hit /pending. Sorting by Brain Rot level soon.")

        query = select(StudyTask).where(StudyTask.completed == False)
        pending = (await db.execute(query)).scalars().all()

        def serialize(task):
            # Enum members expose .value; tolerate plain-string levels too.
            rot = task.brain_rot_level
            if hasattr(rot, 'value'):
                rot = rot.value
            return {
                "id": task.id,
                "title": task.title,
                "subject": task.subject,
                "brain_rot_level": rot,
                "due_date": task.due_date.isoformat() if task.due_date else None,
                "is_reminder": task.is_reminder,
            }

        return {
            "status": "bullish π",
            "message": "Pending tasks secured.",
            "data": [serialize(t) for t in pending],
        }
    except Exception as e:
        logger.error(f"Pending tasks fetch liquidated: {str(e)}")
        raise HTTPException(status_code=500, detail="Failed to fetch pending tasks.")
|
| 93 |
+
|
| 94 |
+
@router.post("/", status_code=status.HTTP_201_CREATED)
async def create_task(task_data: TaskCreatePayload, db: AsyncSession = Depends(get_db)):
    """Inject a new task into the Brain. π§ """
    try:
        # Map the free-form string onto the enum; unknown values degrade to MID.
        rot_map = {"chill": BrainRotLevel.CHILL, "mid": BrainRotLevel.MID, "cooked": BrainRotLevel.COOKED}
        safe_rot = rot_map.get(task_data.brain_rot_level.lower(), BrainRotLevel.MID)

        new_task = StudyTask(
            title=task_data.title,
            subject=task_data.subject,
            brain_rot_level=safe_rot,
            is_reminder=task_data.is_reminder,
            due_date=task_data.due_date
        )
        db.add(new_task)
        await db.commit()

        # NOTE(review): unlike med_scholar.create_study_task, no refresh()
        # is done and the new task id is not returned — confirm intended.
        return {
            "status": "success",
            "message": "Task injected into Orbit. π―"
        }
    except Exception as e:
        logger.error(f"Failed to create task: {str(e)}")
        await db.rollback()
        raise HTTPException(status_code=400, detail="Task creation failed.")
|
| 119 |
+
|
| 120 |
+
@router.post("/{task_id}/complete", status_code=status.HTTP_200_OK)
async def complete_task(task_id: int, payload: TaskCompletePayload, db: AsyncSession = Depends(get_db)):
    """The CompleteTask Endpoint: Captures remarks and marks as finished.

    Returns 404 when the task id does not exist, 500 on any DB failure.
    """
    try:
        logger.info(f"Completing task {task_id} with remarks: {payload.remarks}")
        result = await db.execute(select(StudyTask).where(StudyTask.id == task_id))
        task = result.scalars().first()

        if not task:
            raise HTTPException(status_code=404, detail="Task not found.")

        task.completed = True
        task.remarks = payload.remarks

        await db.commit()
        return {"status": "success", "message": "Task secured and remarks filed. ✅"}
    except HTTPException:
        # BUGFIX: re-raise HTTP errors untouched. Previously the generic
        # handler below caught the 404 raised above and converted it into
        # a misleading 500 "Error completing task."
        raise
    except Exception as e:
        await db.rollback()
        logger.error(f"Failed to complete task: {str(e)}")
        raise HTTPException(status_code=500, detail="Error completing task.")
|
| 140 |
+
|
| 141 |
+
@router.delete("/{task_id}", status_code=status.HTTP_200_OK)
async def delete_task(task_id: int, db: AsyncSession = Depends(get_db)):
    """Wipe a task from existence. Hard delete.

    Returns a "deleted" payload on success; an "error" payload (still 200)
    when the id does not exist — matching the original contract.
    """
    try:
        result = await db.execute(select(StudyTask).where(StudyTask.id == task_id))
        task = result.scalars().first()
        if task:
            await db.delete(task)
            await db.commit()
            return {"status": "deleted", "message": "Task wiped."}
        return {"status": "error", "message": "Task not found."}
    except Exception as e:
        await db.rollback()
        # Consistency fix: this was the only handler in the router that
        # raised a 500 without logging the underlying exception first.
        logger.error(f"Failed to delete task: {str(e)}")
        raise HTTPException(status_code=500, detail="Error deleting task.")
|
app/core/__pycache__/config.cpython-312.pyc
ADDED
|
Binary file (2.43 kB). View file
|
|
|
app/core/config.py
ADDED
|
@@ -0,0 +1,63 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
################################################################################
# FILE: backend/app/core/config.py
# VERSION: 1.1.4 | SYSTEM: Pydantic Fix (No-JSON-List)
################################################################################

from pydantic_settings import BaseSettings
from pydantic import field_validator
from typing import Optional, List, Any
import os

class Settings(BaseSettings):
    """Application configuration loaded from the environment / .env file."""
    PROJECT_NAME: str = "Project Orbit: Jarvis Protocol"
    VERSION: str = "3.1.0"

    # DB Settings - Default to localhost for local testing
    DATABASE_URL: str = os.getenv("DATABASE_URL", "postgresql+asyncpg://orbit_user:super_secret_password@localhost:5432/orbit_db")
    REDIS_URL: str = os.getenv("REDIS_URL", "redis://localhost:6379/0")

    # π‘οΈ SECURITY (The Bouncer's VIP List)
    # NOTE(review): hard-coded SECRET_KEY fallback — acceptable for local dev
    # only; production deployments must set SECRET_KEY in the environment and
    # this default should be rotated since it is committed to the repo.
    SECRET_KEY: str = os.getenv("SECRET_KEY", "3ATLNDwN6SfiTQfyfEjxQpxsRtj_6dzR8QzKxpXeZn8Nn76n4")
    ALGORITHM: str = "HS256"
    ACCESS_TOKEN_EXPIRE_MINUTES: int = 60 * 24 * 7  # one week

    # Agent APIs
    GEMINI_API_KEY: Optional[str] = os.getenv("GEMINI_API_KEY")

    # We change this to str so Pydantic doesn't try to parse it as a JSON list
    GEMINI_API_KEYS_RAW: str = os.getenv("GEMINI_API_KEYS", "")

    # Forex Guardian (MT5)
    MT5_LOGIN: Optional[int] = None
    MT5_PASSWORD: Optional[str] = None
    MT5_SERVER: Optional[str] = None

    @property
    def GEMINI_API_KEYS(self) -> List[str]:
        """Parsed list of keys from the raw string."""
        # Comma-separated, whitespace-tolerant; empty entries are dropped.
        return [k.strip() for k in self.GEMINI_API_KEYS_RAW.split(",") if k.strip()]

    @property
    def async_database_url(self) -> str:
        """π THE ULTIMATE NEON FIX"""
        # Normalize a sync-style URL to the asyncpg driver, strip any existing
        # query string (e.g. Neon's sslmode params), then force SSL with the
        # parameter name asyncpg understands ("ssl", not "sslmode").
        url = self.DATABASE_URL
        if url and url.startswith("postgresql://"):
            url = url.replace("postgresql://", "postgresql+asyncpg://", 1)

        if url and "?" in url:
            url = url.split("?")[0]

        # Parses as (url + "?ssl=require") if url else "" — intended.
        return url + "?ssl=require" if url else ""

    def get_all_api_keys(self) -> List[str]:
        # Union of the rotating key pool and the single fallback key,
        # deduplicated, fallback appended last.
        keys = self.GEMINI_API_KEYS.copy()
        if self.GEMINI_API_KEY and self.GEMINI_API_KEY not in keys:
            keys.append(self.GEMINI_API_KEY)
        return keys

    class Config:
        env_file = ".env"
        extra = "ignore" # Ignore extra env vars
        case_sensitive = True

settings = Settings()
|
app/core/notifications.py
ADDED
|
File without changes
|
app/core/security.py
ADDED
|
File without changes
|
app/db/__pycache__/init_db.cpython-312.pyc
ADDED
|
Binary file (2.71 kB). View file
|
|
|
app/db/__pycache__/session.cpython-312.pyc
ADDED
|
Binary file (1.67 kB). View file
|
|
|
app/db/init_db.py
ADDED
|
@@ -0,0 +1,79 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
################################################################################
|
| 2 |
+
# FILE: backend/app/db/init_db.py
|
| 3 |
+
# VERSION: 1.1.0 | SYSTEM: Neon DB Auto-Migration π
|
| 4 |
+
################################################################################
|
| 5 |
+
|
| 6 |
+
import asyncio
|
| 7 |
+
from sqlalchemy import text
|
| 8 |
+
from app.db.session import async_session, engine
|
| 9 |
+
from app.models.study import Base, StudyTask, BrainRotLevel
|
| 10 |
+
import logging
|
| 11 |
+
|
| 12 |
+
logger = logging.getLogger("Orbit-Genesis")
|
| 13 |
+
|
| 14 |
+
async def ensure_columns_exist():
    """
    Ensures that Orbit v4.0.0 columns exist in the database.
    This handles migrations manually for environments like HF/Render where
    Alembic might not have run or where the DB was created before the schema update.

    Idempotent: each column is probed via information_schema before the ALTER,
    so re-running against an up-to-date database is a no-op. Both statements
    run inside a single transaction (engine.begin).
    """
    async with engine.begin() as conn:
        logger.info("Checking database schema for Orbit v4.0.0 updates...")

        # 1. Check for 'remarks' column
        result = await conn.execute(text("""
            SELECT column_name
            FROM information_schema.columns
            WHERE table_name='study_tasks' AND column_name='remarks';
        """))
        if not result.fetchone():
            logger.info("Adding missing 'remarks' column to 'study_tasks'...")
            await conn.execute(text("ALTER TABLE study_tasks ADD COLUMN remarks TEXT;"))

        # 2. Check for 'is_reminder' column
        result = await conn.execute(text("""
            SELECT column_name
            FROM information_schema.columns
            WHERE table_name='study_tasks' AND column_name='is_reminder';
        """))
        if not result.fetchone():
            logger.info("Adding missing 'is_reminder' column to 'study_tasks'...")
            # DEFAULT FALSE keeps existing rows valid without a backfill.
            await conn.execute(text("ALTER TABLE study_tasks ADD COLUMN is_reminder BOOLEAN DEFAULT FALSE;"))

        logger.info("Database schema check complete. Orbit is synchronized. πΈ")
|
| 44 |
+
|
| 45 |
+
async def init_models():
    """Creates the tables if they don't exist and ensures columns are correct."""
    # Let SQLAlchemy forge any tables that are missing entirely...
    async with engine.begin() as connection:
        await connection.run_sync(Base.metadata.create_all)

    # ...then patch pre-existing tables that lack the v4.0.0 columns.
    await ensure_columns_exist()
    logger.info("Database tables forged and synchronized successfully. π¨")
|
| 53 |
+
|
| 54 |
+
async def seed_data():
    """Insert a couple of starter tasks when the study_tasks table is empty."""
    async with async_session() as db:
        # Only seed a pristine database; never duplicate genesis rows.
        existing = (await db.execute(text("SELECT count(*) FROM study_tasks"))).scalar()

        if existing != 0:
            logger.info("Data already exists. Skipping seed.")
            return

        db.add(StudyTask(
            title="Master the Cardiac Cycle",
            subject="Cardiology",
            brain_rot_level=BrainRotLevel.COOKED,
        ))
        db.add(StudyTask(
            title="Review Prop Firm Drawdown Rules",
            subject="Forex",
            brain_rot_level=BrainRotLevel.CHILL,
        ))
        await db.commit()
        logger.info("Genesis data injected. Orbit is alive.")
|
| 71 |
+
|
| 72 |
+
async def main():
    """Full genesis run: create/patch the schema, then drop in seed rows."""
    await init_models()
    await seed_data()

if __name__ == "__main__":
    # Running this module directly performs a one-shot DB bootstrap.
    logging.basicConfig(level=logging.INFO)
    logger.info("Initiating Project Orbit Genesis Protocol...")
    asyncio.run(main())
|
app/db/redis_cache.py
ADDED
|
File without changes
|
app/db/session.py
ADDED
|
@@ -0,0 +1,46 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
################################################################################
# FILE: backend/app/db/session.py
# VERSION: 1.0.4 | SYSTEM: Neon DB Resilience Protocol
################################################################################

from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession
from sqlalchemy.orm import sessionmaker

from app.core.config import settings
import logging

logger = logging.getLogger("Orbit-DB")

# Create the Async Engine - Optimized for long-running AI requests
# π THE FIX: Added pool_pre_ping and pool_recycle to handle idle timeouts
# during long model downloads.
engine = create_async_engine(
    settings.async_database_url,
    echo=False,
    future=True,
    pool_pre_ping=True, # Checks if connection is alive before using it
    pool_recycle=300, # Recycles connections every 5 minutes
    pool_size=5, # Limit connections to save Neon resources
    max_overflow=10 # Allow some burst overflow
)

# The Session Factory
# expire_on_commit=False keeps ORM objects readable after commit (responses
# are serialized from committed instances without re-fetching).
async_session = sessionmaker(
    engine, class_=AsyncSession, expire_on_commit=False
)

async def get_db():
    """
    Dependency to inject the DB session into our routes.
    Yields a session and safely closes it when the request is done.

    Any exception raised by the route body triggers a rollback before
    re-raising, so a failed request never leaves a dirty transaction.
    """
    async with async_session() as session:
        try:
            yield session
        except Exception as e:
            # If the DB hits a stop loss, we roll back so we don't blow the account
            logger.error(f"Database just hit a stop loss: {e}")
            await session.rollback()
            raise
        finally:
            # NOTE: redundant with the `async with` context manager's own
            # close, but harmless — kept for explicitness.
            await session.close()
|
app/main.py
ADDED
|
@@ -0,0 +1,138 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
################################################################################
|
| 2 |
+
# FILE: backend/app/main.py
|
| 3 |
+
# VERSION: 1.0.8 | SYSTEM: Auto-Migration Lifecycle π
|
| 4 |
+
################################################################################
|
| 5 |
+
|
| 6 |
+
import os
|
| 7 |
+
import logging
|
| 8 |
+
import asyncio
|
| 9 |
+
from contextlib import asynccontextmanager
|
| 10 |
+
from fastapi import FastAPI, Request, HTTPException, BackgroundTasks
|
| 11 |
+
from fastapi.responses import RedirectResponse
|
| 12 |
+
from fastapi.middleware.cors import CORSMiddleware
|
| 13 |
+
from typing import List, Dict, Any
|
| 14 |
+
import uvicorn
|
| 15 |
+
from datetime import datetime
|
| 16 |
+
|
| 17 |
+
# π₯ THE MISSING LIQUIDITY: Master router for Orbit-AI, Forex, etc.
|
| 18 |
+
from app.api.v1.api import api_router
|
| 19 |
+
from app.db.init_db import init_models # π Import the Genesis Protocol
|
| 20 |
+
|
| 21 |
+
# Configure logging for the VM
|
| 22 |
+
logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(name)s - %(levelname)s - %(message)s")
|
| 23 |
+
logger = logging.getLogger("OrbitBrain")
|
| 24 |
+
|
| 25 |
+
# ===============================================================================
|
| 26 |
+
# BACKGROUND TASKS & LIFESPAN
|
| 27 |
+
# ===============================================================================
|
| 28 |
+
|
| 29 |
+
async def forex_guardian_monitor():
    """Simulates the 24/7 Forex MT5 monitor. Never sleeps. Just like the markets.

    Runs until cancelled by the lifespan shutdown hook; the CancelledError is
    caught and swallowed so shutdown stays quiet.
    """
    try:
        while True:
            await asyncio.sleep(3600) # Check every hour in mock mode
    except asyncio.CancelledError:
        logger.info("Forex Guardian gracefully shutting down. Securing the bag.")
        # NOTE(review): not re-raising CancelledError is unusual; fine for a
        # top-level fire-and-forget task, but confirm nothing awaits this
        # task's cancellation to complete.
|
| 36 |
+
|
| 37 |
+
@asynccontextmanager
async def lifespan(app: FastAPI):
    """FastAPI lifespan hook: sync the DB schema and start background workers
    on startup; cancel the workers on shutdown (everything after `yield`)."""
    # Startup logic: Connect to Postgres, Redis, and start workers
    logger.info("πͺ Orbit Brain booting up... Waking up Med-Scholar modules.")

    # π AUTOMATIC DB SYNCHRONIZATION
    try:
        logger.info("Syncing Database Schema (Orbit v4.0.0)...")
        await init_models()
        logger.info("Database synchronized.")
    except Exception as e:
        logger.error(f"Critical Failure during Genesis Protocol: {e}")
        # We continue booting even if DB sync fails, but logs will show why.

    logger.info("Checking Redis cache for pending CATE triggers...")

    # Fire-and-forget background monitor; cancelled at shutdown below.
    forex_task = asyncio.create_task(forex_guardian_monitor())
    yield

    logger.info("Shutting down Orbit. Liquidating pending tasks and closing DB safely.")
    forex_task.cancel()
|
| 58 |
+
|
| 59 |
+
# ===============================================================================
|
| 60 |
+
# APP INITIALIZATION
|
| 61 |
+
# ===============================================================================
|
| 62 |
+
|
| 63 |
+
app = FastAPI(
    title="Project Orbit API",
    description="The Life-OS backend for Med-Scholar, Forex Guardian, and CATE.",
    version="3.1.0",
    lifespan=lifespan,  # startup/shutdown hooks defined above
    swagger_ui_parameters={"docExpansion": "none"}  # collapse endpoint groups in /docs
)

# NOTE(review): the "*" entry makes the explicit localhost origins redundant,
# and browsers reject wildcard origins combined with allow_credentials=True
# for credentialed requests — tighten this list before production.
origins = [
    "http://localhost",
    "http://localhost:8080",
    "*" # Allows all origins for now.
]

app.add_middleware(
    CORSMiddleware,
    allow_origins=origins,
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
|
| 84 |
+
|
| 85 |
+
# ===============================================================================
|
| 86 |
+
# CORE ENDPOINTS
|
| 87 |
+
# ===============================================================================
|
| 88 |
+
|
| 89 |
+
@app.get("/", include_in_schema=False)
|
| 90 |
+
async def root():
|
| 91 |
+
"""Instantly redirects you to the beautiful Swagger UI."""
|
| 92 |
+
return RedirectResponse(url="/docs")
|
| 93 |
+
|
| 94 |
+
@app.get("/health", tags=["System"])
|
| 95 |
+
async def health_check():
|
| 96 |
+
"""Render/HF uses this to verify the deployment didn't crash."""
|
| 97 |
+
return {"status": "healthy", "brain": "locked in", "timestamp": datetime.utcnow().isoformat()}
|
| 98 |
+
|
| 99 |
+
# Mount the real endpoints
|
| 100 |
+
app.include_router(api_router, prefix="/api/v1")
|
| 101 |
+
|
| 102 |
+
|
| 103 |
+
# ===============================================================================
|
| 104 |
+
# CONTEXT-AWARE TRIGGER ENGINE (CATE)
|
| 105 |
+
# ===============================================================================
|
| 106 |
+
|
| 107 |
+
@app.post("/api/v1/cate/sync", tags=["CATE"])
|
| 108 |
+
async def sync_offline_messages(request: Request, background_tasks: BackgroundTasks):
|
| 109 |
+
data = await request.json()
|
| 110 |
+
staged_messages = data.get("messages", [])
|
| 111 |
+
|
| 112 |
+
if not staged_messages:
|
| 113 |
+
return {"status": "no_data", "message": "Nothing to sync. We are chilling."}
|
| 114 |
+
|
| 115 |
+
logger.info(f"Received {len(staged_messages)} offline staged messages. Processing...")
|
| 116 |
+
|
| 117 |
+
for msg in staged_messages:
|
| 118 |
+
timestamp = msg.get("timestamp")
|
| 119 |
+
content = msg.get("content")
|
| 120 |
+
msg_type = msg.get("type", "task")
|
| 121 |
+
logger.info(f"Processing delayed {msg_type} from {timestamp}: {content}")
|
| 122 |
+
|
| 123 |
+
return {"status": "success", "synced_count": len(staged_messages)}
|
| 124 |
+
|
| 125 |
+
@app.post("/api/v1/cate/trigger", tags=["CATE"])
|
| 126 |
+
async def manual_cate_trigger(event_type: str):
|
| 127 |
+
if event_type == "sleep_timer_zero":
|
| 128 |
+
logger.info("CATE: Sleep timer hit zero. User has spawned. Initiating night-owl protocols.")
|
| 129 |
+
return {"status": "triggered", "action": "night_owl_mode_activated"}
|
| 130 |
+
return {"status": "ignored", "reason": "Unknown event type"}
|
| 131 |
+
|
| 132 |
+
# ===============================================================================
|
| 133 |
+
# ENTRY POINT
|
| 134 |
+
# ===============================================================================
|
| 135 |
+
if __name__ == "__main__":
    # Local/dev entry point; in deployment uvicorn is launched externally.
    serve_port = int(os.environ.get("PORT", 8000))
    logger.info(f"π Starting Orbit Brain on port {serve_port}...")
    uvicorn.run("app.main:app", host="0.0.0.0", port=serve_port, reload=False)
|
app/models/__pycache__/study.cpython-312.pyc
ADDED
|
Binary file (1.71 kB). View file
|
|
|
app/models/study.py
ADDED
|
@@ -0,0 +1,42 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# ==========================================
|
| 2 |
+
# IDENTITY: The Syllabus Vault / Models
|
| 3 |
+
# FILEPATH: backend/app/models/study.py
|
| 4 |
+
# COMPONENT: Database Schema
|
| 5 |
+
# ROLE: Defines what a Study Task and Syllabus look like in the DB.
|
| 6 |
+
# VIBE: The harsh reality of how much Anatomy you haven't read yet. ππ©Ί
|
| 7 |
+
# ==========================================
|
| 8 |
+
|
| 9 |
+
from sqlalchemy import Column, Integer, String, Boolean, DateTime, ForeignKey, Enum, Text
|
| 10 |
+
from sqlalchemy.orm import declarative_base
|
| 11 |
+
from datetime import datetime
|
| 12 |
+
import enum
|
| 13 |
+
|
| 14 |
+
Base = declarative_base()
|
| 15 |
+
|
| 16 |
+
class BrainRotLevel(enum.Enum):
    """How fried the student's brain is; used as a scheduling hint."""

    CHILL = "chill"    # Just highlighting notes
    MID = "mid"        # Actually trying to understand Pharmacology
    COOKED = "cooked"  # 3 AM before the exam, straight panic
|
| 21 |
+
class StudyTask(Base):
    """ORM row for a single study task or reminder in the syllabus vault."""
    __tablename__ = "study_tasks"

    id = Column(Integer, primary_key=True, index=True)
    title = Column(String, index=True, nullable=False)
    subject = Column(String, index=True, nullable=False) # e.g., "Internal Medicine"

    # Scheduling & Sync
    due_date = Column(DateTime, nullable=True)  # None = no deadline
    completed = Column(Boolean, default=False)

    # New metadata for Orbit Life-OS v4.0.0
    remarks = Column(Text, nullable=True)  # free-form notes (e.g. filed on completion)
    is_reminder = Column(Boolean, default=False)  # distinguishes reminders from study tasks

    # Chronotype-based scheduling hint
    brain_rot_level = Column(Enum(BrainRotLevel), default=BrainRotLevel.CHILL)

    # Naive UTC timestamp — TODO confirm consumers expect naive datetimes.
    created_at = Column(DateTime, default=datetime.utcnow)

    def __repr__(self):
        return f"<StudyTask(title='{self.title}', subject='{self.subject}', completed={self.completed})>"
|
app/routers/med_scholar.py
ADDED
|
@@ -0,0 +1,54 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
################################################################################
|
| 2 |
+
#FILE: backend/app/routers/med_scholar.py
|
| 3 |
+
#VERSION: 3.1.5 | SYSTEM: Jarvis Protocol
|
| 4 |
+
################################################################################
|
| 5 |
+
|
| 6 |
+
from fastapi import APIRouter, Depends, HTTPException, status
|
| 7 |
+
from sqlalchemy.ext.asyncio import AsyncSession
|
| 8 |
+
from sqlalchemy.future import select
|
| 9 |
+
from typing import List, Dict, Any
|
| 10 |
+
import logging
|
| 11 |
+
|
| 12 |
+
from app.db.session import get_db
|
| 13 |
+
from app.models.study import StudyTask
|
| 14 |
+
|
| 15 |
+
logger = logging.getLogger("Orbit-MedScholar")
|
| 16 |
+
|
| 17 |
+
router = APIRouter(prefix="/study/tasks", tags=["med-scholar"])
|
| 18 |
+
|
| 19 |
+
@router.get("/pending")
async def get_pending_tasks(db: AsyncSession = Depends(get_db)) -> List[Dict[str, Any]]:
    """Fetches pending study tasks."""
    def _rot_label(value: Any) -> str:
        # Enum members expose .name; anything else is stringified and the
        # segment after the last '.' is kept (matches the legacy behavior).
        return value.name if hasattr(value, "name") else str(value).split(".")[-1]

    rows = await db.execute(select(StudyTask).where(StudyTask.completed == False))
    pending = rows.scalars().all()

    payload: List[Dict[str, Any]] = []
    for task in pending:
        payload.append({
            "id": task.id,
            "title": task.title,
            "subject": task.subject,
            "brainRotLevel": _rot_label(getattr(task, "brain_rot_level", "LOW")),
            "isCompleted": getattr(task, "completed", False),
            "dueDate": task.due_date.isoformat() if getattr(task, "due_date", None) else None,
        })
    return payload
|
| 36 |
+
|
| 37 |
+
# π₯ THE FIX: The VM endpoint to actually receive the completion signal and update Postgres!
|
| 38 |
+
@router.put("/{task_id}/complete", status_code=status.HTTP_200_OK)
async def complete_task(task_id: int, db: AsyncSession = Depends(get_db)):
    """Marks a task as completed in the Matrix.

    Raises 404 for an unknown id; a commit failure rolls the session back
    and surfaces a clean 500 (consistent with the tasks router).
    """
    result = await db.execute(select(StudyTask).where(StudyTask.id == task_id))
    task = result.scalars().first()

    if not task:
        logger.warning(f"Attempted to complete ghost task ID: {task_id}")
        raise HTTPException(status_code=404, detail="Task not found. Bro is hallucinating.")

    task.completed = True
    try:
        await db.commit()
    except Exception as e:
        # FIX: previously a failed commit bubbled a raw exception and left the
        # session dirty; roll back and return a clean 500 like the tasks router.
        await db.rollback()
        logger.error(f"Failed to complete task: {str(e)}")
        raise HTTPException(status_code=500, detail="Error completing task.")

    logger.info(f"W Secured! Task '{task.title}' marked as completed. Let's go! π")

    # Ideally, we trigger a Blast Protocol here to update the Workstation UI too!
    return {"status": "success", "message": "Bag secured. Task wiped."}
|
app/routers/orbit_ai.py
ADDED
|
@@ -0,0 +1,93 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
################################################################################
|
| 2 |
+
# FILE: backend/app/routers/orbit_ai.py
|
| 3 |
+
# VERSION: 4.4.0 | SYSTEM: Orbit (The Life-OS Protocol)
|
| 4 |
+
# IDENTITY: The Voice / Chat Endpoint - Task Management & Memory Execution
|
| 5 |
+
################################################################################
|
| 6 |
+
|
| 7 |
+
from fastapi import APIRouter, Depends, HTTPException
|
| 8 |
+
from pydantic import BaseModel
|
| 9 |
+
from typing import List, Optional
|
| 10 |
+
from sqlalchemy.ext.asyncio import AsyncSession
|
| 11 |
+
from sqlalchemy.future import select
|
| 12 |
+
from app.db.session import get_db
|
| 13 |
+
from app.models.study import StudyTask, BrainRotLevel
|
| 14 |
+
from app.services.orbit_brain import OrbitAssistant
|
| 15 |
+
import logging
|
| 16 |
+
|
| 17 |
+
logger = logging.getLogger("Orbit-Voice")
|
| 18 |
+
|
| 19 |
+
router = APIRouter(prefix="/orbit", tags=["Orbit-AI"])
|
| 20 |
+
|
| 21 |
+
class ChatMessage(BaseModel):
    """One prior turn of the conversation."""
    role: str # "user" or "model"
    content: str

class ChatRequest(BaseModel):
    """Inbound chat payload: the new message plus optional prior turns."""
    message: str
    # Mutable default is safe here: pydantic deep-copies field defaults.
    history: Optional[List[ChatMessage]] = []

class ChatResponse(BaseModel):
    """Outbound chat payload returned to the client."""
    reply: str
    status: str = "success"
|
| 33 |
+
@router.post("/converse", response_model=ChatResponse)
|
| 34 |
+
async def converse_with_orbit(request: ChatRequest, db: AsyncSession = Depends(get_db)):
|
| 35 |
+
"""The main neural link for talking to Orbit. Now with memory and schedule control!"""
|
| 36 |
+
try:
|
| 37 |
+
assistant = OrbitAssistant(db_session=db)
|
| 38 |
+
|
| 39 |
+
user_msg = request.message
|
| 40 |
+
|
| 41 |
+
# Run the AI chat with history
|
| 42 |
+
ai_reply = await assistant.chat(
|
| 43 |
+
user_msg,
|
| 44 |
+
history=[{"role": h.role, "parts": [h.content]} for h in request.history] if request.history else []
|
| 45 |
+
)
|
| 46 |
+
|
| 47 |
+
# 1. Handle Task Creation
|
| 48 |
+
if assistant.tasks_to_create:
|
| 49 |
+
for task_data in assistant.tasks_to_create:
|
| 50 |
+
new_task = StudyTask(
|
| 51 |
+
title=task_data["title"],
|
| 52 |
+
subject=task_data["subject"],
|
| 53 |
+
due_date=task_data.get("due_date"),
|
| 54 |
+
brain_rot_level=task_data["brain_rot_level"],
|
| 55 |
+
is_reminder=task_data.get("is_reminder", False),
|
| 56 |
+
remarks=task_data.get("remarks")
|
| 57 |
+
)
|
| 58 |
+
db.add(new_task)
|
| 59 |
+
logger.info(f"W Secured: Created {len(assistant.tasks_to_create)} tasks! π―")
|
| 60 |
+
|
| 61 |
+
# 2. Handle Task Updates
|
| 62 |
+
if assistant.tasks_to_update:
|
| 63 |
+
for update_data in assistant.tasks_to_update:
|
| 64 |
+
task_id = update_data["task_id"]
|
| 65 |
+
updates = update_data["updates"]
|
| 66 |
+
|
| 67 |
+
result = await db.execute(select(StudyTask).where(StudyTask.id == task_id))
|
| 68 |
+
task = result.scalars().first()
|
| 69 |
+
if task:
|
| 70 |
+
for key, value in updates.items():
|
| 71 |
+
if key == "brain_rot_level" and value:
|
| 72 |
+
rot_map = {"chill": BrainRotLevel.CHILL, "mid": BrainRotLevel.MID, "cooked": BrainRotLevel.COOKED}
|
| 73 |
+
setattr(task, key, rot_map.get(value.lower(), BrainRotLevel.MID))
|
| 74 |
+
elif hasattr(task, key):
|
| 75 |
+
setattr(task, key, value)
|
| 76 |
+
logger.info(f"Orbit updated {len(assistant.tasks_to_update)} tasks.")
|
| 77 |
+
|
| 78 |
+
# 3. Handle Task Deletions
|
| 79 |
+
if assistant.tasks_to_delete:
|
| 80 |
+
for task_id in assistant.tasks_to_delete:
|
| 81 |
+
result = await db.execute(select(StudyTask).where(StudyTask.id == task_id))
|
| 82 |
+
task = result.scalars().first()
|
| 83 |
+
if task:
|
| 84 |
+
await db.delete(task)
|
| 85 |
+
logger.info(f"Orbit deleted {len(assistant.tasks_to_delete)} tasks.")
|
| 86 |
+
|
| 87 |
+
await db.commit()
|
| 88 |
+
return ChatResponse(reply=ai_reply)
|
| 89 |
+
|
| 90 |
+
except Exception as e:
|
| 91 |
+
logger.error(f"Orbit's brain crashed: {str(e)}")
|
| 92 |
+
await db.rollback()
|
| 93 |
+
raise HTTPException(status_code=500, detail=f"Orbit's brain crashed: {str(e)}")
|
app/services/blast.py
ADDED
|
@@ -0,0 +1,55 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# ==========================================
|
| 2 |
+
# IDENTITY: The Megaphone / Blast Protocol
|
| 3 |
+
# FILEPATH: backend/app/services/blast.py
|
| 4 |
+
# COMPONENT: WebSocket Manager
|
| 5 |
+
# ROLE: Blasts real-time syncs to Ubuntu & Android.
|
| 6 |
+
# VIBE: "WAKE UP, YOUR STOP LOSS JUST GOT HIT!" π¨π
|
| 7 |
+
# ==========================================
|
| 8 |
+
|
| 9 |
+
from fastapi import WebSocket
|
| 10 |
+
import logging
|
| 11 |
+
import json
|
| 12 |
+
|
| 13 |
+
logger = logging.getLogger("Blast-Protocol")
|
| 14 |
+
|
| 15 |
+
class BlastManager:
    """
    Dual-device sync hub. Tracks every connected WebSocket and pushes JSON
    events (Forex SL/TP alerts, task reminders, ...) to all of them so the
    laptop and phone stay in lockstep.
    """
    def __init__(self):
        self.active_devices: list[WebSocket] = []

    async def connect(self, websocket: WebSocket):
        """Accept a new device and start including it in broadcasts."""
        await websocket.accept()
        self.active_devices.append(websocket)
        logger.info(f"Device locked in. Total devices: {len(self.active_devices)}")

    def disconnect(self, websocket: WebSocket):
        """Forget a device; safe to call even if it was never registered."""
        if websocket in self.active_devices:
            self.active_devices.remove(websocket)
            logger.info("Device disconnected. Hope they didn't rage quit.")

    async def blast_event(self, event_type: str, payload: dict):
        """
        Serialize a {type, data} envelope and push it to every connected
        device; connections that fail are pruned afterwards.
        """
        envelope = json.dumps({
            "type": event_type,
            "data": payload
        })
        stale: list = []
        for ws in self.active_devices:
            try:
                await ws.send_text(envelope)
            except Exception as e:
                logger.error(f"Failed to blast device: {e}")
                stale.append(ws)

        # Clean up connections that ghosted us
        for ws in stale:
            self.disconnect(ws)

# Global instance to be imported across the app (like in forex.py)
blast_engine = BlastManager()
|
app/services/cate.py
ADDED
|
@@ -0,0 +1,45 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# ==========================================
|
| 2 |
+
# IDENTITY: The AI Big Brother / CATE
|
| 3 |
+
# FILEPATH: backend/app/services/cate.py
|
| 4 |
+
# COMPONENT: Proactive Intelligence
|
| 5 |
+
# ROLE: Decides WHEN to bother you based on your context.
|
| 6 |
+
# VIBE: Your toxic but highly organized manager. π
π§
|
| 7 |
+
# ==========================================
|
| 8 |
+
|
| 9 |
+
import datetime
import logging

logger = logging.getLogger("CATE")

class ContextAwareTriggerEngine:
    """Decides whether an event is allowed to interrupt the user right now."""

    def __init__(self):
        # One of "CHILLING", "TRADING", "STUDYING", "SLEEPING".
        self.user_status = "CHILLING"

    def update_context(self, new_status: str):
        """Record what the user is currently doing."""
        self.user_status = new_status
        logger.info(f"CATE updated user context to: {self.user_status}")

    def evaluate_trigger(self, event_type: str, priority: str) -> bool:
        """
        Vibe-check an event. True means "blast the notification"; False means
        it was suppressed by a quiet-hours or focus rule — e.g. no "Study
        Pharma" ping while a heavy lot size on Gold (XAUUSD) is open.
        """
        hour_now = datetime.datetime.now().hour

        # Quiet hours (02:00-05:00): only CRITICAL events (margin calls) pass.
        if priority != "CRITICAL" and 2 <= hour_now <= 5:
            logger.info("CATE blocked notification. Let bro sleep.")
            return False

        # Focus rule: study nags are muted while the user is trading.
        if event_type == "STUDY_REMINDER" and self.user_status == "TRADING":
            logger.info("CATE blocked study reminder. Bro is watching the charts.")
            return False

        logger.info(f"CATE approved trigger: {event_type}")
        return True

# Initialize CATE
cate = ContextAwareTriggerEngine()
|
app/services/governor.py
ADDED
|
@@ -0,0 +1,43 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# ==========================================
|
| 2 |
+
# IDENTITY: The Manager / Life-Governor
|
| 3 |
+
# FILEPATH: backend/app/services/governor.py
|
| 4 |
+
# COMPONENT: Chronotype Scheduling Logic
|
| 5 |
+
# ROLE: Decides what you should be doing based on the time of day.
|
| 6 |
+
# VIBE: "Bro it's 2 AM, why are you trying to learn Cardiology? Go to sleep." ποΈ
|
| 7 |
+
# ==========================================
|
| 8 |
+
|
| 9 |
+
from datetime import datetime
|
| 10 |
+
import logging
|
| 11 |
+
|
| 12 |
+
logger = logging.getLogger("Life-Governor")
|
| 13 |
+
|
| 14 |
+
class LifeGovernor:
    """Chronotype scheduler: maps an hour of the day to a recommended activity."""

    def __init__(self):
        # Kisumu timezone vibes.
        # Hardcoding the prime hours based on your chronotype.
        self.peak_focus_hours = range(8, 12)   # 8 AM to 12 PM - Internal Med time
        self.grind_hours = range(14, 18)       # 2 PM to 6 PM - Coding / SHOFCO time
        self.london_session = range(10, 19)    # Forex London crossover vibes (not used by this class yet)

    def get_current_recommendation(self) -> str:
        """Return a recommendation string for the current local hour.

        NOTE(review): reads the host's local clock - confirm the server runs
        in EAT (Africa/Nairobi) or inject a timezone-aware clock.
        """
        return self._recommendation_for_hour(datetime.now().hour)

    def _recommendation_for_hour(self, hour: int) -> str:
        """Pure hour -> recommendation mapping (extracted for testability).

        Hours 5-7, 12-13 and 18-19 intentionally fall through to FLEX_TIME.
        """
        if hour in range(0, 5):
            return "SLEEP_MODE: The markets are dead and your brain is cooked. Go to bed. 😴"
        elif hour in self.peak_focus_hours:
            return "DEEP_WORK: Highest cognitive load recommended. Lock in on that Med School syllabus."
        elif hour in self.grind_hours:
            return "EXECUTION: Good time for coding the VM or handling SHOFCO tasks."
        elif hour >= 20:
            return "WIND_DOWN: Backtest your Forex strategies, review Anki flashcards, and chill."
        else:
            return "FLEX_TIME: Do whatever. Maybe grab some fish by the lake?"

governor = LifeGovernor()
|
app/services/llm.py
ADDED
|
@@ -0,0 +1,44 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
################################################################################
|
| 2 |
+
#FILE: backend/app/services/llm.py
|
| 3 |
+
#VERSION: 1.0.1 | SYSTEM: Jarvis Protocol
|
| 4 |
+
################################################################################
|
| 5 |
+
|
| 6 |
+
import google.generativeai as genai
|
| 7 |
+
# π FIX: Added 'app.' prefix
|
| 8 |
+
from app.core.config import settings
|
| 9 |
+
import logging
|
| 10 |
+
|
| 11 |
+
logger = logging.getLogger("Orbit-Speak")
|
| 12 |
+
|
| 13 |
+
class OrbitSpeak:
    """Thin wrapper around the Gemini API that replies in Orbit's persona."""

    def __init__(self):
        if settings.GEMINI_API_KEY:
            genai.configure(api_key=settings.GEMINI_API_KEY)
            self.model = genai.GenerativeModel('gemini-2.5-flash')
            logger.info("Orbit-Speak initialized. Gemini is online and ready to judge you.")
        else:
            # No key -> degrade gracefully instead of crashing at import time.
            self.model = None
            logger.warning("No Gemini API key found. Orbit is currently mute. 🤐")

    async def generate_response(self, prompt: str, context: str = "chilling") -> str:
        """Generate a persona-flavoured reply for *prompt*.

        Returns a friendly error string (never raises) when the API key is
        missing or the API call fails.
        """
        if not self.model:
            return "Bro, you forgot to give me my API key. Check the .env file."

        system_prompt = f"""
        You are Orbit, an advanced Life-OS assistant.
        Current user context: {context}.
        Keep your responses concise, slightly Gen-Z, and highly pragmatic.
        If the user is over-leveraging in Forex, tell them to touch grass.
        If they are avoiding Med School studying, roast them.
        """

        full_prompt = f"{system_prompt}\nUser says: {prompt}"

        try:
            # FIX: generate_content is a blocking network call; running it
            # directly inside this coroutine would stall the event loop.
            # Offload to a worker thread (same approach as orbit_brain.py).
            import asyncio
            response = await asyncio.to_thread(self.model.generate_content, full_prompt)
            return response.text
        except Exception as e:
            logger.error(f"Gemini API hit a stop loss: {e}")
            return "My brain is fried right now (API Error). Ask me later."
|
| 43 |
+
|
| 44 |
+
orbit_brain = OrbitSpeak()
|
app/services/med_scan.py
ADDED
|
@@ -0,0 +1,43 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
################################################################################
|
| 2 |
+
#FILE: backend/app/services/med_scan.py
|
| 3 |
+
#VERSION: 1.0.1 | SYSTEM: Jarvis Protocol
|
| 4 |
+
################################################################################
|
| 5 |
+
|
| 6 |
+
import logging
|
| 7 |
+
import google.generativeai as genai
|
| 8 |
+
# π FIX: Added 'app.' prefix
|
| 9 |
+
from app.core.config import settings
|
| 10 |
+
|
| 11 |
+
logger = logging.getLogger("Med-Scan")
|
| 12 |
+
|
| 13 |
+
class RadiologistEngine:
    """OCR/interpretation engine that decodes medical chart images via Gemini."""

    def __init__(self):
        if settings.GEMINI_API_KEY:
            genai.configure(api_key=settings.GEMINI_API_KEY)
            self.model = genai.GenerativeModel('gemini-2.5-flash')
            logger.info("Med-Scan OCR initialized. Ready to decode hieroglyphics.")
        else:
            # No key -> stay inert; scan_chart will return an error dict.
            self.model = None
            logger.warning("No Gemini API key. Good luck reading that prescription yourself.")

    async def scan_chart(self, image_bytes: bytes) -> dict:
        """Extract structured notes from a chart/ECG image.

        Returns {"status": "success", "extracted_data": ...} on success or an
        error dict; never raises.
        """
        if not self.model:
            return {"status": "error", "message": "API Key missing. Cannot process image."}

        logger.info("Processing new medical chart... Hold tight.")

        prompt = """
        You are an expert radiologist, medical scribe, and top-tier Med School professor.
        Read this patient chart, ECG, or medical notes image.
        Extract the key information (Symptoms, Diagnosis, Plan, Pathophysiology) and format it cleanly using Markdown.
        If the handwriting is absolutely cooked and illegible, just do your best and flag it for manual review.
        """

        try:
            # FIX: blocking SDK call moved off the event loop.
            # NOTE(review): raw bytes are passed straight to generate_content;
            # the SDK may expect a typed image part - confirm against SDK docs.
            import asyncio
            response = await asyncio.to_thread(self.model.generate_content, [prompt, image_bytes])
            return {"status": "success", "extracted_data": response.text}
        except Exception as e:
            logger.error(f"Med-Scan hit a snag: {e}")
            return {"status": "error", "message": "Failed to decode the chart. Might actually be a demon spell."}
|
| 42 |
+
|
| 43 |
+
radiologist = RadiologistEngine()
|
app/services/memory.py
ADDED
|
@@ -0,0 +1,55 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
################################################################################
|
| 2 |
+
# FILE: backend/app/services/memory.py
|
| 3 |
+
# VERSION: 1.0.0 | SYSTEM: Orbit Memory Protocol
|
| 4 |
+
# IDENTITY: The Long-term Memory (Vector DB / ChromaDB)
|
| 5 |
+
################################################################################
|
| 6 |
+
|
| 7 |
+
import chromadb
|
| 8 |
+
from chromadb.utils import embedding_functions
|
| 9 |
+
import logging
|
| 10 |
+
import os
|
| 11 |
+
|
| 12 |
+
logger = logging.getLogger("Orbit-Memory")
|
| 13 |
+
|
| 14 |
+
class MemoryService:
    """Long-term user-preference memory backed by a local ChromaDB collection."""

    def __init__(self):
        # Store memory in a local directory under the current working dir.
        persist_directory = os.path.join(os.getcwd(), "orbit_memory")
        self.client = chromadb.PersistentClient(path=persist_directory)

        # Cosine distance suits sentence-embedding similarity.
        self.collection = self.client.get_or_create_collection(
            name="orbit_user_preferences",
            metadata={"hnsw:space": "cosine"}
        )

    def learn(self, fact: str):
        """Store a fact about the user; idempotent for identical facts."""
        try:
            # FIX: the builtin hash() is salted per process (PYTHONHASHSEED),
            # so the same fact would get a different ID every run and pile up
            # duplicates in the persistent store. Use a stable content hash.
            import hashlib
            fact_id = hashlib.sha256(fact.encode("utf-8")).hexdigest()
            self.collection.add(
                documents=[fact],
                ids=[fact_id]
            )
            logger.info(f"Orbit learned: {fact}")
        except Exception as e:
            logger.error(f"Failed to learn: {e}")

    def query(self, user_query: str, n_results: int = 3) -> str:
        """Return up to *n_results* relevant facts joined by newlines.

        Returns "" when nothing matches or retrieval fails (never raises).
        """
        try:
            results = self.collection.query(
                query_texts=[user_query],
                n_results=n_results
            )

            documents = results.get('documents', [[]])[0]
            if documents:
                return "\n".join(documents)
            return ""
        except Exception as e:
            logger.error(f"Memory retrieval failed: {e}")
            return ""

memory_service = MemoryService()
|
app/services/mt5_bridge.py
ADDED
|
@@ -0,0 +1,53 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
################################################################################
|
| 2 |
+
#FILE: backend/app/services/mt5_bridge.py
|
| 3 |
+
#VERSION: 1.0.1 | SYSTEM: Jarvis Protocol
|
| 4 |
+
################################################################################
|
| 5 |
+
|
| 6 |
+
import MetaTrader5 as mt5
|
| 7 |
+
# π FIX: Added 'app.' prefix
|
| 8 |
+
from app.core.config import settings
|
| 9 |
+
import logging
|
| 10 |
+
import pandas as pd
|
| 11 |
+
|
| 12 |
+
logger = logging.getLogger("MT5-Bridge")
|
| 13 |
+
|
| 14 |
+
class MT5Engine:
    """Bridge to the MetaTrader 5 terminal: handles login and exposure checks."""

    def __init__(self):
        self.connected = False

    def connect(self):
        """Initialise the terminal and log in; returns True on success."""
        if not mt5.initialize():
            logger.error(f"MT5 Init failed. Error code: {mt5.last_error()}")
            return False

        logged_in = mt5.login(
            settings.MT5_LOGIN,
            password=settings.MT5_PASSWORD,
            server=settings.MT5_SERVER
        )

        if not logged_in:
            logger.error(f"Failed to connect to MT5. Did you blow the account? Code: {mt5.last_error()}")
            return False

        self.connected = True
        logger.info(f"MT5 Connected successfully to {settings.MT5_SERVER}. Let's get this bread.")
        return True

    def check_exposure(self):
        """Return a dict describing margin health for the logged-in account."""
        if not self.connected:
            self.connect()

        account_info = mt5.account_info()
        if account_info is None:
            return {"status": "error", "message": "Could not retrieve account info."}

        margin_level = account_info.margin_level
        equity = account_info.equity

        # Below a 300% margin level the account is considered at risk.
        if 0 < margin_level < 300.0:
            return {"status": "DANGER", "message": "Margin Level critically low. Close some positions bro!"}

        return {"status": "CHILL", "equity": equity, "margin_level": margin_level}

mt5_engine = MT5Engine()
|
app/services/orbit_brain.py
ADDED
|
@@ -0,0 +1,173 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
################################################################################
|
| 2 |
+
# FILE: backend/app/services/orbit_brain.py
|
| 3 |
+
# VERSION: 5.7.0 | SYSTEM: Orbit (The Life-OS Protocol)
|
| 4 |
+
# IDENTITY: The Brain / Gemini GenAI SDK - Model & Key Rotation Matrix
|
| 5 |
+
################################################################################
|
| 6 |
+
|
| 7 |
+
from google import genai
|
| 8 |
+
from google.genai import types
|
| 9 |
+
from datetime import datetime, timedelta
|
| 10 |
+
import logging
|
| 11 |
+
import asyncio
|
| 12 |
+
import pytz
|
| 13 |
+
import random
|
| 14 |
+
|
| 15 |
+
from app.models.study import BrainRotLevel, StudyTask
|
| 16 |
+
from app.core.config import settings
|
| 17 |
+
from app.services.memory import memory_service
|
| 18 |
+
from sqlalchemy.future import select
|
| 19 |
+
|
| 20 |
+
logger = logging.getLogger("Orbit-Brain")
|
| 21 |
+
|
| 22 |
+
class OrbitAssistant:
    """Gemini-backed chat brain: builds context from memory + DB, then calls the
    API with an API-key x model rotation matrix to ride out rate limits."""

    # 🎯 FORCE MODELS: Using the requested Tier-1 models for maximum liquidity
    MODELS = ["gemini-2.5-flash", "gemini-2.5-flash-lite", "gemini-flash-latest"]

    def __init__(self, db_session=None):
        # Optional async SQLAlchemy session; DB context is skipped when absent.
        self.db = db_session
        # Buffers checked after a chat turn; nothing in this class writes to
        # them - presumably filled by tool/function calls elsewhere (TODO confirm).
        self.tasks_to_create = []
        self.tasks_to_update = []
        self.tasks_to_delete = []
        self.user_tz = pytz.timezone("Africa/Nairobi")
        nairobi_now_dt = datetime.now(self.user_tz)
        nairobi_now = nairobi_now_dt.strftime("%Y-%m-%d %H:%M:%S")

        # NOTE(review): the timestamp is baked in at construction time, so a
        # long-lived instance will report a stale "current time" to the model.
        self.system_prompt = f"""
        You are Orbit, an elite, highly intelligent, Gen-Z "Life-OS" Chief of Staff.
        Your boss is a medical student living in Kisumu, Kenya.

        CURRENT TIME (Nairobi/EAT): {nairobi_now}
        Always assume the user is in EAT-Nairobi.

        YOUR PILLARS:
        1. "Med-Scholar": Medicine, CATs, exams.
        2. "Projects": Coding, tech.
        3. "Internship": SHOFCO Libraries.
        4. "Life Admin": Bible study, errands, life.
        5. "Forex Guardian": XAUUSD, trading.

        BRAIN ROT LEVELS: "chill", "mid", "cooked".

        TONE: Confident, sassy, Gen-Z slang ("no cap", "W", "cooked", "locked in").
        """

        self.api_keys = settings.get_all_api_keys()
        if not self.api_keys:
            logger.error("NO API KEYS FOUND! Orbit is clinically brain dead. 💀")
            raise ValueError("Missing GEMINI_API_KEY")

        # Initial pointers for rotation matrix (random start spreads quota use
        # across keys between processes).
        self.current_key_index = random.randint(0, len(self.api_keys) - 1)
        self.current_model_index = 0

    async def get_relevant_context(self, user_message: str) -> str:
        """Fetch memory and recent task completions.

        Returns a newline-joined context string; any retrieval failure is
        logged and swallowed so chat can proceed without context.
        """
        context_parts = []
        try:
            # 1. Memory Context (ChromaDB)
            memory = memory_service.query(user_message)
            if memory:
                context_parts.append(f"PAST PREFERENCES/MEMORY:\n{memory}")

            # 2. Database Context (Recent & Active Tasks)
            if self.db:
                try:
                    # Last 5 completed tasks, newest first - used for feedback.
                    result = await self.db.execute(
                        select(StudyTask)
                        .where(StudyTask.completed == True)
                        .order_by(StudyTask.created_at.desc())
                        .limit(5)
                    )
                    recent_tasks = result.scalars().all()
                    if recent_tasks:
                        reviews = "\n".join([f"- {t.title}: {t.remarks}" for t in recent_tasks if t.remarks])
                        if reviews:
                            context_parts.append(f"RECENT TASK FEEDBACK:\n{reviews}")

                    # All incomplete tasks become the "current schedule" brief.
                    result = await self.db.execute(select(StudyTask).where(StudyTask.completed == False))
                    active_tasks = result.scalars().all()
                    if active_tasks:
                        task_brief = "\n".join([f"ID {t.id}: {t.title} ({t.subject}) - Due: {t.due_date}" for t in active_tasks])
                        context_parts.append(f"CURRENT SCHEDULE:\n{task_brief}")
                except Exception as db_err:
                    # DB context is best-effort; chat still works without it.
                    logger.warning(f"DB Context skipped: {db_err}")

        except Exception as e:
            logger.error(f"Context retrieval failed: {e}")

        return "\n".join(context_parts)

    async def chat(self, user_message: str, history: list = None) -> str:
        """
        Sends message with a 'Rotation Matrix' strategy.
        It cycles through ALL models for a key before moving to the next key.

        history items are dicts shaped like {"role": ..., "parts": [str, ...]}
        (presumably - confirm against the caller). Raises on non-quota errors;
        returns a fallback string when every key/model combination fails.
        """
        context = await self.get_relevant_context(user_message)
        dynamic_prompt = self.system_prompt
        if context:
            dynamic_prompt += f"\n\nRELEVANT CONTEXT:\n{context}"

        formatted_history = []
        if history:
            for h in history:
                formatted_history.append(types.Content(role=h["role"], parts=[types.Part(text=p) for p in h["parts"]]))

        # --- THE ROTATION MATRIX ---
        # Total attempts = Number of Keys * Number of Models
        total_keys = len(self.api_keys)
        total_models = len(self.MODELS)

        for key_attempt in range(total_keys):
            current_key = self.api_keys[self.current_key_index]

            # Skip placeholders (unfilled .env template values)
            if not current_key or "your_gemini" in current_key:
                self.current_key_index = (self.current_key_index + 1) % total_keys
                continue

            client = genai.Client(api_key=current_key)

            for model_attempt in range(total_models):
                current_model = self.MODELS[self.current_model_index]

                try:
                    logger.info(f"Trying {current_model} with key {current_key[:8]}...")

                    # NOTE(review): automatic function calling is enabled here
                    # but no tools are registered in this class - confirm tools
                    # are attached elsewhere or this config is a no-op.
                    chat = client.chats.create(
                        model=current_model,
                        config=types.GenerateContentConfig(
                            system_instruction=dynamic_prompt,
                            automatic_function_calling=types.AutomaticFunctionCallingConfig(disable=False)
                        ),
                        history=formatted_history
                    )

                    # SDK call is blocking; run it off the event loop.
                    response = await asyncio.to_thread(chat.send_message, user_message)
                    reply_text = response.text

                    if not reply_text:
                        # Empty text usually means the turn was all tool calls.
                        reply_text = "Task secured. 🎯" if (self.tasks_to_create or self.tasks_to_update or self.tasks_to_delete) else "I'm locked in."

                    return reply_text

                except Exception as e:
                    err_str = str(e)
                    # If it's a rate limit or invalid key, we rotate models first, then keys
                    if any(err in err_str for err in ["429", "RESOURCE_EXHAUSTED", "400", "INVALID_ARGUMENT", "API_KEY_INVALID"]):
                        logger.warning(f"⚠️ {current_model} failed with key {current_key[:8]}. Error: {err_str[:50]}...")

                        # Move to the next model in the list
                        self.current_model_index = (self.current_model_index + 1) % total_models

                        # If we've tried all models for this key, move to next key and reset model pointer
                        if model_attempt == total_models - 1:
                            logger.warning(f"❌ All models failed for key {current_key[:8]}. Rotating to next key.")
                            self.current_key_index = (self.current_key_index + 1) % total_keys
                            self.current_model_index = 0
                        continue
                    else:
                        # For other errors (Connection, etc.), we don't necessarily rotate, just log and fail
                        logger.error(f"Critical AI Error: {err_str}")
                        raise e

        return "Orbit is fully cooked. All keys and models hit a stop-loss. 💀"
|
app/services/psych_check.py
ADDED
|
@@ -0,0 +1,45 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# ==========================================
|
| 2 |
+
# IDENTITY: The Therapist / Psych-Check Engine
|
| 3 |
+
# FILEPATH: backend/app/services/psych_check.py
|
| 4 |
+
# COMPONENT: Proactive Intelligence
|
| 5 |
+
# ROLE: Monitors your "heart rate" to lock trading if you're panicking.
|
| 6 |
+
# VIBE: "Bro your BPM is 140, step away from the terminal. You are about to revenge trade." ππ
|
| 7 |
+
# ==========================================
|
| 8 |
+
|
| 9 |
+
import logging
|
| 10 |
+
|
| 11 |
+
logger = logging.getLogger("Psych-Check")
|
| 12 |
+
|
| 13 |
+
class PsychCheckEngine:
    """Biometric vibe-check: locks the trading bridge when heart rate spikes."""

    def __init__(self):
        self.trading_locked = False
        self.baseline_bpm = 70
        self.panic_threshold = 110  # If BPM hits this, we cut the MT5 bridge

    def evaluate_biometrics(self, current_bpm: int):
        """
        In the future, this hooks into your smartwatch API.
        For now, we simulate the vibe check.
        """
        logger.info(f"Psych-Check running. Current BPM: {current_bpm}")

        panicking = current_bpm >= self.panic_threshold
        if panicking:
            self.trading_locked = True
            logger.warning("🚨 PANIC DETECTED 🚨. User is stressing. Locking MT5 terminal.")
            return {
                "status": "LOCKED",
                "message": "Heart rate too high. Trading disabled for 15 minutes. Touch grass."
            }

        # Unlock only once BPM is back within 10 of baseline.
        calmed_down = self.trading_locked and current_bpm <= (self.baseline_bpm + 10)
        if calmed_down:
            self.trading_locked = False
            logger.info("User has calmed down. Unlocking terminal. Let him cook.")
            return {
                "status": "UNLOCKED",
                "message": "Vitals stable. You may resume trading."
            }

        return {"status": "ACTIVE", "message": "Stay frosty."}

# Global instance
psych_ward = PsychCheckEngine()
|
app/services/telegram_mod.py
ADDED
|
@@ -0,0 +1,48 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
################################################################################
|
| 2 |
+
#FILE: backend/app/services/telegram_mod.py
|
| 3 |
+
#VERSION: 1.0.1 | SYSTEM: Jarvis Protocol
|
| 4 |
+
################################################################################
|
| 5 |
+
|
| 6 |
+
import logging
|
| 7 |
+
import asyncio
|
| 8 |
+
# π FIX: Added 'app.' prefix
|
| 9 |
+
from app.core.config import settings
|
| 10 |
+
|
| 11 |
+
logger = logging.getLogger("Telegram-Bouncer")
|
| 12 |
+
|
| 13 |
+
class TelegramModerator:
    """Keyword-based group-chat bouncer for the Telegram community."""

    def __init__(self):
        self.bot_token = getattr(settings, "TELEGRAM_BOT_TOKEN", None)
        self.banned_words = ["spoonfeed", "leak", "exam answers", "buy signal", "crypto pump"]

        if self.bot_token:
            logger.info("Telegram Bouncer is at the door. Checking IDs.")
        else:
            logger.warning("No Telegram bot token. The group chat is currently the Wild West.")

    async def check_message(self, user_id: str, message_text: str) -> dict:
        """Classify a message and return an action dict for the caller to apply."""
        logger.info(f"Scanning message from user {user_id}...")
        lowered = message_text.lower()

        # First matching banned word wins - order of self.banned_words matters.
        for banned in self.banned_words:
            if banned in lowered:
                logger.warning(f"Opp detected! User {user_id} used banned word: {banned}")
                return {
                    "action": "DELETE_AND_WARN",
                    "message": f"Bro, we don't do '{banned}' here. Lock in and study. Strike 1."
                }

        if "anatomy" in lowered or "usmle" in lowered:
            return {"action": "REACT", "emoji": "🧠", "message": "Valid academic discussion. Let him cook."}

        return {"action": "NONE", "message": "Message clean. Vibe check passed."}

    async def start_polling(self):
        """Placeholder poll loop; returns immediately without a bot token."""
        if not self.bot_token:
            return

        logger.info("Starting Telegram polling loop... Watchlist active.")
        while True:
            await asyncio.sleep(5)

telegram_bouncer = TelegramModerator()
|
app/worker/heartbeat.py
ADDED
|
@@ -0,0 +1,44 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
################################################################################
|
| 2 |
+
#FILE: backend/app/worker/heartbeat.py
|
| 3 |
+
#VERSION: 1.0.1 | SYSTEM: Jarvis Protocol
|
| 4 |
+
################################################################################
|
| 5 |
+
|
| 6 |
+
import asyncio
|
| 7 |
+
import logging
|
| 8 |
+
# π FIX: Added 'app.' prefix to all service imports!
|
| 9 |
+
from app.services.governor import governor
|
| 10 |
+
from app.services.cate import cate
|
| 11 |
+
from app.services.blast import blast_engine
|
| 12 |
+
|
| 13 |
+
logger = logging.getLogger("Orbit-Heartbeat")
|
| 14 |
+
|
| 15 |
+
async def forex_market_watch():
    """24/7 background loop that would watch MT5 for trade events (stubbed)."""
    poll_seconds = 60
    while True:
        logger.debug("Scanning Forex charts... XAUUSD looking mid.")
        # Stub flag: flip to True once the real MT5 event feed is wired in.
        fake_tp_event = False

        if fake_tp_event:
            payload = {"pair": "XAUUSD", "status": "TP_HIT", "pnl": "+$500", "message": "Bag secured. Go buy some fish."}
            await blast_engine.blast_event("TRADE_UPDATE", payload)

        await asyncio.sleep(poll_seconds)
|
| 28 |
+
|
| 29 |
+
async def study_nag_loop():
    """Hourly check: nag about studying when the Governor says it's deep-work time."""
    while True:
        recommendation = governor.get_current_recommendation()
        # CATE is only consulted during deep-work hours (short-circuit).
        should_nag = "DEEP_WORK" in recommendation and cate.evaluate_trigger("STUDY_REMINDER", "HIGH")
        if should_nag:
            await blast_engine.blast_event(
                "MED_SCHOLAR",
                {"message": "Bro, Internal Med isn't going to read itself. Open the textbook."}
            )
        await asyncio.sleep(3600)
|
| 40 |
+
|
| 41 |
+
async def start_heartbeat():
    """Spawn the background watchers; must be called from a running event loop."""
    logger.info("Starting Orbit Heartbeat... 💫")
    for watcher in (forex_market_watch, study_nag_loop):
        asyncio.create_task(watcher())
|
fix_db.py
ADDED
|
@@ -0,0 +1,39 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import asyncio
|
| 2 |
+
from sqlalchemy import text
|
| 3 |
+
from app.db.session import engine
|
| 4 |
+
|
| 5 |
+
async def _ensure_column(conn, table: str, column: str, ddl: str) -> None:
    """Add *column* to *table* (using column definition *ddl*) if missing.

    table/column/ddl are hardcoded constants below - do not pass untrusted
    values, the SQL is built by string interpolation.
    """
    result = await conn.execute(text(f"""
        SELECT column_name
        FROM information_schema.columns
        WHERE table_name='{table}' AND column_name='{column}';
    """))
    if result.fetchone() is None:
        print(f"Adding '{column}' column to '{table}'...")
        await conn.execute(text(f"ALTER TABLE {table} ADD COLUMN {ddl};"))
        print(f"Column '{column}' added.")
    else:
        print(f"Column '{column}' already exists.")

async def run_migration():
    """Idempotently ensure the ad-hoc 'study_tasks' columns exist.

    Deduplicates the previous copy-pasted check/add sequences into
    _ensure_column.
    """
    async with engine.begin() as conn:
        print("Checking database columns...")
        await _ensure_column(conn, "study_tasks", "remarks", "remarks TEXT")
        await _ensure_column(conn, "study_tasks", "is_reminder", "is_reminder BOOLEAN DEFAULT FALSE")

if __name__ == "__main__":
    asyncio.run(run_migration())
|
inject_task.py
ADDED
|
@@ -0,0 +1,46 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# ================================================================================
|
| 2 |
+
# FILE: /Projects/Orbit/backend/inject_task.py
|
| 3 |
+
# PURPOSE: Direct Market Access to PostgreSQL πͺ
|
| 4 |
+
# ================================================================================
|
| 5 |
+
|
| 6 |
+
import sys
|
| 7 |
+
import os
|
| 8 |
+
|
| 9 |
+
# π οΈ THE HEDGE: Python is having amnesia. We forcefully inject the backend
|
| 10 |
+
# folder into its memory (sys.path) so it knows where the 'app' module lives.
|
| 11 |
+
# No more fakeouts!
|
| 12 |
+
sys.path.append(os.path.dirname(os.path.abspath(__file__)))
|
| 13 |
+
|
| 14 |
+
from app.db.database import SessionLocal
|
| 15 |
+
from app.models.task import Task
|
| 16 |
+
from datetime import datetime, timedelta
|
| 17 |
+
|
| 18 |
+
def inject_liquidity():
    """Insert one hardcoded sample Task straight into Postgres (manual seed)."""
    print("Opening connection to the dark pool (Postgres)... 🏦")
    session = SessionLocal()

    try:
        # Create a new limit order (Task)
        sample = Task(
            title="Review Pharmacology Flashcards",
            subject="Internal Medicine",
            brain_rot_level="HIGH",  # Bro is cooked 😅
            completed=False,
            due_date=datetime.now() + timedelta(days=2)
        )

        # Add to the session and commit the transaction
        session.add(sample)
        session.commit()

        print(f"✅ W Secured! Task '{sample.title}' injected into Orbit.")
        print("Go tap the Refresh button on your phone now! 📱✨")

    except Exception as e:
        print(f"❌ Stop Loss Hit! Transaction failed: {e}")
        session.rollback()
    finally:
        session.close()

if __name__ == "__main__":
    inject_liquidity()
|
render.yaml
ADDED
|
@@ -0,0 +1,44 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
################################################################################
|
| 2 |
+
# FILE: backend/render.yaml
|
| 3 |
+
# VERSION: 1.0.0 | SYSTEM: Infrastructure as Code (IaC)
|
| 4 |
+
################################################################################
|
| 5 |
+
#
|
| 6 |
+
# This file tells Render exactly how to build your entire stack.
|
| 7 |
+
# It creates the Postgres DB, Redis instance, and the FastAPI Web Service
|
| 8 |
+
# all connected together automatically. It's basically a setup cheat code.
|
| 9 |
+
|
| 10 |
+
services:
|
| 11 |
+
# 1. The Brain (FastAPI Web Service)
|
| 12 |
+
- type: web
|
| 13 |
+
name: orbit-api
|
| 14 |
+
env: docker # We are using the Dockerfile we just updated
|
| 15 |
+
region: frankfurt # Choose a region close to Kenya for better latency (Frankfurt is usually good)
|
| 16 |
+
plan: free # Keepin' it free tier for now
|
| 17 |
+
envVars:
|
| 18 |
+
# Connect the API to the DBs created below
|
| 19 |
+
- key: DATABASE_URL
|
| 20 |
+
fromDatabase:
|
| 21 |
+
name: orbit-db
|
| 22 |
+
property: connectionString
|
| 23 |
+
- key: REDIS_URL
|
| 24 |
+
fromService:
|
| 25 |
+
type: redis
|
| 26 |
+
name: orbit-redis
|
| 27 |
+
property: connectionString
|
| 28 |
+
# Add your API keys here (Render will prompt you for these or you can set them in the dashboard later)
|
| 29 |
+
# - key: GEMINI_API_KEY
|
| 30 |
+
# sync: false
|
| 31 |
+
|
| 32 |
+
  # 2. The Long-Term Memory (PostgreSQL)
  # NOTE(review): Render blueprints declare Postgres under a top-level
  # `databases:` key, not as a service entry with `type: pgo` — confirm this
  # type is accepted, otherwise the `fromDatabase: orbit-db` reference above
  # will not resolve at deploy time.
  - type: pgo
    name: orbit-db
    plan: free
    region: frankfurt
|
| 37 |
+
# Render manages the Postgres version automatically
|
| 38 |
+
|
| 39 |
+
# 3. The Short-Term Memory / Trigger Cache (Redis)
|
| 40 |
+
- type: redis
|
| 41 |
+
name: orbit-redis
|
| 42 |
+
plan: free
|
| 43 |
+
region: frankfurt
|
| 44 |
+
ipAllowList: [] # Allow internal Render connections only
|
requirements.txt
ADDED
|
@@ -0,0 +1,30 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
################################################################################
|
| 2 |
+
# FILE: backend/requirements.txt
|
| 3 |
+
# VERSION: 1.2.1 | SYSTEM: Orbit Brain Upgrade + Memory
|
| 4 |
+
################################################################################
|
| 5 |
+
|
| 6 |
+
# Core Framework
|
| 7 |
+
fastapi>=0.104.1,<0.105.0
|
| 8 |
+
uvicorn[standard]>=0.24.0.post1,<0.25.0
|
| 9 |
+
python-dotenv>=1.0.0,<1.1.0
|
| 10 |
+
|
| 11 |
+
# Database & ORM
|
| 12 |
+
sqlalchemy>=2.0.23,<2.1.0
|
| 13 |
+
asyncpg>=0.29.0,<0.30.0
|
| 14 |
+
alembic>=1.12.1,<1.13.0
|
| 15 |
+
|
| 16 |
+
# Security
|
| 17 |
+
pydantic>=2.5.2,<2.6.0
|
| 18 |
+
pydantic-settings>=2.1.0,<2.2.0
|
| 19 |
+
python-jose[cryptography]>=3.3.0,<3.4.0
|
| 20 |
+
passlib[bcrypt]>=1.7.4,<1.8.0
|
| 21 |
+
|
| 22 |
+
# AI & Agents
|
| 23 |
+
google-genai>=0.3.0,<0.4.0
|
| 24 |
+
google-api-core>=2.11.1
|
| 25 |
+
chromadb>=0.4.24
|
| 26 |
+
|
| 27 |
+
# Data & Integrations
|
| 28 |
+
redis>=5.0.1,<5.1.0
|
| 29 |
+
pytz>=2023.3.post1,<2023.4
|
| 30 |
+
sentence-transformers>=2.2.2
|
test_neon_db.py
ADDED
|
@@ -0,0 +1,90 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#>>>--- START_FILE_BLOCK: backend/test_neon_db.py
|
| 2 |
+
################################################################################
|
| 3 |
+
# FILE: backend/test_neon_db.py
|
| 4 |
+
# VERSION: 1.0.0 | SYSTEM: Neon DB Snitch / Diagnostic EA
|
| 5 |
+
################################################################################
|
| 6 |
+
#
|
| 7 |
+
# PURPOSE:
|
| 8 |
+
# Bypasses the main app to interrogate the Neon Postgres Database directly.
|
| 9 |
+
# Detects cold-starts, invalid passwords, SSL tantrums, and connection drops.
|
| 10 |
+
|
| 11 |
+
import asyncio
|
| 12 |
+
import asyncpg
|
| 13 |
+
import os
|
| 14 |
+
import time
|
| 15 |
+
from urllib.parse import urlparse, unquote
|
| 16 |
+
|
| 17 |
+
def _mask_password(url: str) -> str:
    """Return *url* with any password in its netloc replaced by '********'.

    Works on the raw (possibly percent-encoded) URL text. The original code
    used ``urlparse(url).password`` for the replacement, but urlparse returns
    the *decoded* password, so percent-encoded credentials were printed in
    clear text. Rewriting the userinfo section of the netloc avoids that.
    """
    sep = "://"
    if sep not in url:
        return url
    scheme, _, rest = url.partition(sep)
    userinfo, at, hostpart = rest.rpartition("@")
    # No '@' means no credentials; no ':' in userinfo means no password.
    if not at or ":" not in userinfo:
        return url
    user, _, _password = userinfo.partition(":")
    return f"{scheme}{sep}{user}:********@{hostpart}"


async def run_snitch():
    """Standalone diagnostic probe for the Neon Postgres database.

    Reads DATABASE_URL from the environment, normalizes it for raw asyncpg
    (strips SQLAlchemy's '+asyncpg' driver tag and query params, forces SSL),
    then connects directly — bypassing the app — to time cold starts and
    surface common failure modes: bad credentials, missing database, and
    connection timeouts. All results are reported on stdout; always returns
    None.
    """
    print("==================================================")
    print(" π΅οΈββοΈ NEON DB SNITCH v1.0 (INITIALIZING PROBE) ")
    print("==================================================")

    # 1. Fetch the raw liquidity (Environment Variable)
    raw_url = os.getenv("DATABASE_URL")

    if not raw_url:
        print("β FATAL: DATABASE_URL is completely missing! Mr. Hugging Face's vault is empty.")
        return

    # Hide the password so we don't leak it on the public blockchain (logs)
    print(f"π RAW URL DETECTED: {_mask_password(raw_url)}")

    # 2. Apply the Anti-Slippage Fixes exactly like config.py
    # Remove SQLAlchemy's '+asyncpg' because pure asyncpg hates it
    clean_url = raw_url.replace("postgresql+asyncpg://", "postgresql://")

    # Strip Neon's weird parameters (channel_binding etc.) before forcing SSL
    if "?" in clean_url:
        clean_url = clean_url.split("?")[0]

    # Force pure SSL
    clean_url += "?ssl=require"

    clean_parsed = urlparse(clean_url)
    print(f"π οΈ CLEANED URL (Executing Trade): {_mask_password(clean_url)}")
    print("--------------------------------------------------")

    # 3. Execution Phase: Attempt Connection & Time the Cold Start
    print("β³ Sending Ping to Neon Postgres... Waiting for DB to put its boots on...")
    start_time = time.time()

    try:
        # We establish a raw connection, zero intermediaries.
        conn = await asyncpg.connect(clean_url, timeout=15.0)

        # Guarantee the connection is released even if the test query blows
        # up — the original leaked `conn` when fetchval raised.
        try:
            latency = round((time.time() - start_time) * 1000, 2)
            print(f"β CONNECTION SECURED! (Latency: {latency} ms)")

            if latency > 2000:
                print("β οΈ WARNING: High latency detected! Neon was definitely sleeping. That's a cold start.")
            else:
                print("β‘ DB is wide awake and routing fast!")

            # 4. Take Profit: Run a test query
            version = await conn.fetchval('SELECT version();')
            print(f"π NEON POSTGRES VERSION: {version[:50]}...")
        finally:
            await conn.close()

        print("==================================================")
        print(" π’ ALL SYSTEMS GO. THE VAULT IS OPEN. ")
        print("==================================================")

    except asyncpg.exceptions.InvalidAuthorizationSpecificationError:
        print("β ERROR: Invalid Password or Username! Neon rejected your credentials. Check your Hugging Face secrets.")
    except asyncpg.exceptions.InvalidCatalogNameError:
        print(f"β ERROR: Database name '{clean_parsed.path[1:]}' does not exist on this Neon server!")
    except asyncio.TimeoutError:
        print("β ERROR: Connection Timed Out! Neon took too long to wake up, or Safaricom dropped the packet somewhere over Thika.")
    except Exception as e:
        print(f"β UNKNOWN SLIPPAGE DETECTED: {str(e)}")
        print(" -> Tip: Check if the error mentions 'ssl' or 'channel_binding'.")
    finally:
        print("\n[Snitch execution finished]")


if __name__ == "__main__":
    asyncio.run(run_snitch())
|
| 89 |
+
|
| 90 |
+
#<<<--- END_FILE_BLOCK: backend/test_neon_db.py
|