Spaces:
Running
Running
keep alive all services
Browse files
- cronjob_service.py +8 -0
- keep_projects_alive_service.py +14 -0
- mongodb_keep_alive_service.py +55 -0
- neondb_postgres_keep_alive_service.py +83 -0
- pinecone_keep_alive_service.py +34 -0
- requirements.txt +5 -1
- supabase_keep_alive_service.py +74 -0
cronjob_service.py
CHANGED
|
@@ -9,6 +9,8 @@ import os
|
|
| 9 |
from twilio.rest import Client
|
| 10 |
from twilio.base.exceptions import TwilioRestException
|
| 11 |
|
|
|
|
|
|
|
| 12 |
load_dotenv()
|
| 13 |
|
| 14 |
# --- Twilio Configuration (New Additions) ---
|
|
@@ -104,6 +106,12 @@ def init_scheduler():
|
|
| 104 |
id='daily_chat_limit_reset_api_users'
|
| 105 |
)
|
| 106 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 107 |
scheduler.start()
|
| 108 |
|
| 109 |
# New: Optional startup notification
|
|
|
|
| 9 |
from twilio.rest import Client
|
| 10 |
from twilio.base.exceptions import TwilioRestException
|
| 11 |
|
| 12 |
+
from keep_projects_alive_service import ping_all_projects
|
| 13 |
+
|
| 14 |
load_dotenv()
|
| 15 |
|
| 16 |
# --- Twilio Configuration (New Additions) ---
|
|
|
|
| 106 |
id='daily_chat_limit_reset_api_users'
|
| 107 |
)
|
| 108 |
|
| 109 |
+
scheduler.add_job(
|
| 110 |
+
ping_all_projects,
|
| 111 |
+
trigger=CronTrigger(hour=0, minute=10, second=0),
|
| 112 |
+
id='ping_all_projects'
|
| 113 |
+
)
|
| 114 |
+
|
| 115 |
scheduler.start()
|
| 116 |
|
| 117 |
# New: Optional startup notification
|
keep_projects_alive_service.py
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
|
| 2 |
+
|
| 3 |
+
from mongodb_keep_alive_service import ping_mongodb_projects
|
| 4 |
+
from neondb_postgres_keep_alive_service import ping_neondb_projects
|
| 5 |
+
from pinecone_keep_alive_service import ping_all_pinecone_indexes
|
| 6 |
+
from supabase_keep_alive_service import ping_all_supabase_projects
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
def ping_all_projects():
    """Run every keep-alive ping, one backing service at a time.

    Order matches the original call sequence: Supabase, NeonDB,
    Pinecone, then MongoDB.
    """
    for keep_alive in (
        ping_all_supabase_projects,
        ping_neondb_projects,
        ping_all_pinecone_indexes,
        ping_mongodb_projects,
    ):
        keep_alive()
|
| 14 |
+
|
mongodb_keep_alive_service.py
ADDED
|
@@ -0,0 +1,55 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
import logging
|
| 3 |
+
from urllib.parse import parse_qs, urlparse
|
| 4 |
+
from dotenv import load_dotenv
|
| 5 |
+
from pymongo import MongoClient
|
| 6 |
+
|
| 7 |
+
# Configure logging
|
| 8 |
+
logging.basicConfig(
|
| 9 |
+
level=logging.INFO,
|
| 10 |
+
format='%(asctime)s - %(levelname)s - %(message)s',
|
| 11 |
+
handlers=[
|
| 12 |
+
logging.StreamHandler()
|
| 13 |
+
]
|
| 14 |
+
)
|
| 15 |
+
logger = logging.getLogger(__name__)
|
| 16 |
+
|
| 17 |
+
load_dotenv()
|
| 18 |
+
|
| 19 |
+
mongodb_uris = os.getenv('MONGODB_URIS')
|
| 20 |
+
|
| 21 |
+
def extract_app_name_from_uri(uri):
    """Return the ``appName`` query parameter of a MongoDB URI.

    Falls back to ``"MongoDB Ping Service"`` when the parameter is
    absent or the URI cannot be parsed at all.
    """
    fallback = "MongoDB Ping Service"
    try:
        query = urlparse(uri).query
        values = parse_qs(query).get('appName')
    except Exception as e:
        logger.debug(f"Failed to extract appName from URI: {e}")
        return fallback
    return values[0] if values else fallback
|
| 31 |
+
|
| 32 |
+
def ping_mongodb():
    """Ping every configured MongoDB deployment to keep it alive.

    Reads the comma-separated ``MONGODB_URIS`` environment variable; for
    each URI it opens a client, issues the server ``ping`` command, and
    logs the outcome.  No default database is required.  Failures are
    logged per URI and never abort the sweep.
    """
    if not mongodb_uris:
        logger.warning("No MongoDB URIs found in environment variables")
        return

    for uri in mongodb_uris.split(','):
        uri = uri.strip()  # Remove any whitespace around the URI
        if not uri:
            continue

        service_name = extract_app_name_from_uri(uri)
        client = None
        try:
            # Bounded server selection so one unreachable cluster cannot
            # stall the keep-alive job for pymongo's 30 s default.
            client = MongoClient(uri, serverSelectionTimeoutMS=10000)
            client.admin.command('ping')
            logger.info(f"{service_name}: Successfully pinged MongoDB at {uri[:50]}...")  # Truncate URI for security
        except Exception as e:
            logger.error(f"{service_name}: Error pinging MongoDB at {uri[:50]}... - {str(e)}")
        finally:
            # The original closed the client only on success, leaking it
            # whenever the ping raised; always close.
            if client is not None:
                client.close()
|
| 51 |
+
|
| 52 |
+
def ping_mongodb_projects():
    """Scheduler entry point: run one MongoDB keep-alive pass with log framing."""
    logger.info("Starting MongoDB ping checks...")
    ping_mongodb()
    logger.info("Completed MongoDB ping checks.")
|
neondb_postgres_keep_alive_service.py
ADDED
|
@@ -0,0 +1,83 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
import logging
|
| 3 |
+
from dotenv import load_dotenv
|
| 4 |
+
import psycopg2
|
| 5 |
+
from psycopg2 import OperationalError
|
| 6 |
+
from urllib.parse import urlparse
|
| 7 |
+
|
| 8 |
+
# Configure logging
|
| 9 |
+
logging.basicConfig(
|
| 10 |
+
level=logging.INFO,
|
| 11 |
+
format='%(asctime)s - %(levelname)s - %(message)s'
|
| 12 |
+
)
|
| 13 |
+
logger = logging.getLogger(__name__)
|
| 14 |
+
|
| 15 |
+
load_dotenv()
|
| 16 |
+
|
| 17 |
+
NEONDB_POSTGRES_URIS = os.getenv('NEONDB_POSTGRES_URIS', '').split(',')
|
| 18 |
+
|
| 19 |
+
def ping_neondb():
    """
    Ping all NeonDB instances to keep connections alive.

    Iterates the comma-separated ``NEONDB_POSTGRES_URIS`` environment
    variable, runs ``SELECT 1`` against each database, and returns a
    dictionary mapping database name to ``{'host', 'status', 'details'}``.
    """
    results = {}

    for uri in filter(None, (u.strip() for u in NEONDB_POSTGRES_URIS)):
        # Parse outside the connection try-block: the original assigned
        # db_name/host inside it, so a parse failure would have raised
        # NameError inside the except handlers instead of recording it.
        try:
            parsed_uri = urlparse(uri)
            db_name = parsed_uri.path[1:]  # Remove the leading '/'
            host = parsed_uri.hostname
        except Exception:
            db_name, host = uri[:30], None  # best-effort label for results

        conn = None
        cursor = None
        try:
            # Connect and run a trivial query — enough to count as activity.
            conn = psycopg2.connect(uri)
            cursor = conn.cursor()
            cursor.execute("SELECT 1")
            cursor.fetchone()

            results[db_name] = {
                'host': host,
                'status': 'Ping successful',
                'details': f"Successfully connected to database '{db_name}' on {host}"
            }
            logger.info(f"Successfully connected to database '{db_name}' on {host}")

        except OperationalError as e:
            results[db_name] = {
                'host': host,
                'status': 'Connection failed',
                'details': str(e)
            }
            logger.error(f"Connection failed to database '{db_name}' on {host}: {str(e)}")
        except Exception as e:
            results[db_name] = {
                'host': host,
                'status': 'Error',
                'details': str(e)
            }
            logger.error(f"Error connecting to database '{db_name}' on {host}: {str(e)}")
        finally:
            # Close even when execute/fetch raised; the original leaked
            # the connection on any failure after connect().
            if cursor is not None:
                cursor.close()
            if conn is not None:
                conn.close()

    return results
|
| 68 |
+
|
| 69 |
+
|
| 70 |
+
# Example usage
|
| 71 |
+
def ping_neondb_projects():
    """Scheduler entry point: run the NeonDB ping pass and log a summary."""
    logger.info("Starting NeonDB ping checks...")

    for db_name, result in ping_neondb().items():
        logger.info(f"Database: {db_name}")
        for field in ('host', 'status', 'details'):
            logger.info(f"{field.capitalize()}: {result[field]}")
        logger.info("-" * 40)

    logger.info("Completed NeonDB ping checks.")
|
| 83 |
+
|
pinecone_keep_alive_service.py
ADDED
|
@@ -0,0 +1,34 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
import time
|
| 3 |
+
import logging
|
| 4 |
+
from dotenv import load_dotenv
|
| 5 |
+
from pinecone import Pinecone
|
| 6 |
+
|
| 7 |
+
# Configure logging
|
| 8 |
+
logging.basicConfig(
|
| 9 |
+
level=logging.INFO,
|
| 10 |
+
format='%(asctime)s - %(levelname)s - %(message)s'
|
| 11 |
+
)
|
| 12 |
+
logger = logging.getLogger(__name__)
|
| 13 |
+
|
| 14 |
+
load_dotenv()
|
| 15 |
+
|
| 16 |
+
# Read comma-separated list of API keys
|
| 17 |
+
keys = os.getenv("PINECONE_API_KEYS", "").split(",")
|
| 18 |
+
|
| 19 |
+
def ping_all_pinecone_indexes():
    """Touch every index reachable from each configured Pinecone API key.

    Reads the comma-separated ``PINECONE_API_KEYS`` environment variable,
    lists the indexes for each key, and calls ``describe_index_stats`` on
    each one so the project registers activity.  Failures are logged and
    never abort the sweep.
    """
    for key in filter(None, (k.strip() for k in keys)):
        pc = Pinecone(api_key=key)
        try:
            indexes = pc.list_indexes()  # This returns IndexModel objects
        except Exception as e:
            # Only the key prefix is logged, never the full secret.
            logger.error(f"[{key[:5]}…] failed to list indexes: {e}")
            continue

        for index in indexes:
            try:
                # The stats call itself is the keep-alive; its result is
                # intentionally discarded (the original bound it to an
                # unused local).
                pc.Index(index.name).describe_index_stats()
                logger.info(f"[{key[:5]}…] ping {index.name} ✓")
            except Exception as e:
                logger.error(f"[{key[:5]}…] ping {index.name} ✗: {e}")
|
| 34 |
+
|
requirements.txt
CHANGED
|
@@ -4,4 +4,8 @@ fastapi
|
|
| 4 |
uvicorn
|
| 5 |
requests
|
| 6 |
dotenv
|
| 7 |
-
twilio
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 4 |
uvicorn
|
| 5 |
requests
|
| 6 |
python-dotenv
|
| 7 |
+
twilio
|
| 8 |
+
pymongo==4.11.2
|
| 9 |
+
psycopg2-binary==2.9.10
|
| 10 |
+
pinecone==6.0.2
|
| 11 |
+
supabase==2.13.0
|
supabase_keep_alive_service.py
ADDED
|
@@ -0,0 +1,74 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from supabase import create_client, Client
|
| 2 |
+
import logging
|
| 3 |
+
import os
|
| 4 |
+
from dotenv import load_dotenv
|
| 5 |
+
|
| 6 |
+
# Configure logging
|
| 7 |
+
logging.basicConfig(
|
| 8 |
+
level=logging.INFO,
|
| 9 |
+
format='%(asctime)s - %(levelname)s - %(message)s',
|
| 10 |
+
handlers=[
|
| 11 |
+
logging.StreamHandler(), # Output to console
|
| 12 |
+
logging.FileHandler('supabase_ping.log') # Output to file
|
| 13 |
+
]
|
| 14 |
+
)
|
| 15 |
+
|
| 16 |
+
load_dotenv()
|
| 17 |
+
|
| 18 |
+
SUPABASE_PROJECTS = {
|
| 19 |
+
"chat_csv_and_pdf": {
|
| 20 |
+
"url": os.getenv("CHAT_CSV_AND_PDF_SUPABASE_URL"),
|
| 21 |
+
"anon_key": os.getenv("CHAT_CSV_AND_PDF_SUPABASE_ANON_KEY"),
|
| 22 |
+
"table": os.getenv("CHAT_CSV_AND_PDF_TABLE_NAME")
|
| 23 |
+
},
|
| 24 |
+
"customer_support_assistant": {
|
| 25 |
+
"url": os.getenv("CUSTOMER_SUPPORT_ASSISTANT_SUPABASE_URL"),
|
| 26 |
+
"anon_key": os.getenv("CUSTOMER_SUPPORT_ASSISTANT_SUPABASE_ANON_KEY"),
|
| 27 |
+
"table": os.getenv("CUSTOMER_SUPPORT_ASSISTANT_TABLE_NAME")
|
| 28 |
+
},
|
| 29 |
+
"csv_charts": {
|
| 30 |
+
"url": os.getenv("CSV_CHARTS_SUPABASE_URL"),
|
| 31 |
+
"anon_key": os.getenv("CSV_CHARTS_SUPABASE_ANON_KEY"),
|
| 32 |
+
"table": os.getenv("CSV_CHARTS_TABLE_NAME")
|
| 33 |
+
},
|
| 34 |
+
"database_chat": {
|
| 35 |
+
"url": os.getenv("DATABASE_CHAT_SUPABASE_URL"),
|
| 36 |
+
"anon_key": os.getenv("DATABASE_CHAT_SUPABASE_ANON_KEY"),
|
| 37 |
+
"table": os.getenv("DATABASE_CHAT_TABLE_NAME")
|
| 38 |
+
},
|
| 39 |
+
"pixeldream_ai": {
|
| 40 |
+
"url": os.getenv("PIXELDREAM_AI_SUPABASE_URL"),
|
| 41 |
+
"anon_key": os.getenv("PIXELDREAM_AI_SUPABASE_ANON_KEY"),
|
| 42 |
+
"table": os.getenv("PIXELDREAM_AI_TABLE_NAME")
|
| 43 |
+
}
|
| 44 |
+
}
|
| 45 |
+
|
| 46 |
+
def ping_supabase(service_name: str) -> bool:
    """Ping a Supabase project to keep it active.

    Looks the service up in ``SUPABASE_PROJECTS``, runs a one-row select
    against its configured table, and returns True on success.  All
    failures are logged and reported as False so callers can keep going.
    """
    if service_name not in SUPABASE_PROJECTS:
        logging.error(f"Service '{service_name}' not found in config.")
        return False

    config = SUPABASE_PROJECTS[service_name]
    # Missing env vars used to crash create_client() outside any try,
    # aborting the whole keep-alive sweep; fail soft instead.
    if not config["url"] or not config["anon_key"] or not config["table"]:
        logging.error(f"Service '{service_name}' is missing url/key/table configuration.")
        return False

    try:
        supabase: Client = create_client(config["url"], config["anon_key"])
        # Simple query to prevent deactivation
        response = supabase.table(config["table"]).select("*").limit(1).execute()
        if not response.data:
            # NOTE(review): an empty table also yields no data and is
            # reported as a failure here, matching original behaviour —
            # confirm whether an empty table should count as success.
            logging.error(f"Failed to ping {service_name}: No data returned.")
            return False
        logging.info(f"Pinged {service_name} successfully.")
        return True
    except Exception as e:
        logging.error(f"Failed to ping {service_name}: {str(e)}", exc_info=True)
        return False
|
| 66 |
+
|
| 67 |
+
|
| 68 |
+
def ping_all_supabase_projects():
    """Run ``ping_supabase`` for every project in ``SUPABASE_PROJECTS``."""
    logging.info("Pinging all Supabase projects...")
    for service_name in list(SUPABASE_PROJECTS):
        ping_supabase(service_name)
|
| 73 |
+
|
| 74 |
+
|