#!/bin/bash

# XbyJob container startup script.
# Initializes the database first, then starts the service(s) selected by
# the SERVICE_TYPE environment variable.

set -e

echo "开始启动 XbyJob 服务..."

# Wait for the MySQL service to become reachable, with an explicit
# connection probe (host "mysql" — docker-compose service name, presumably).
echo "等待MySQL服务启动..."

# Both MySQL credentials must be present before we attempt a connection;
# abort early with a per-variable error message if either is missing.
for required_var in MYSQL_USER MYSQL_PASSWORD; do
    if [ -z "${!required_var}" ]; then
        echo "错误: $required_var 环境变量未设置"
        exit 1
    fi
done

# Probe MySQL up to 30 times, 2 seconds apart.  Success is tracked in a
# flag so we do not need the redundant extra connection attempt (and the
# wasted trailing sleep) the post-loop re-check used to perform.
mysql_ready=false
for attempt in {1..30}; do
    if mysql -h mysql -u "$MYSQL_USER" -p"$MYSQL_PASSWORD" --skip-ssl -e "SELECT 1" >/dev/null 2>&1; then
        echo "MySQL连接测试成功"
        mysql_ready=true
        break
    fi
    echo "MySQL连接测试失败，等待重试... ($attempt/30)"
    sleep 2
done

# Hard failure: the services cannot run without the database.
if [ "$mysql_ready" != true ]; then
    echo "错误：无法连接到MySQL数据库，服务启动失败"
    exit 1
fi
echo "MySQL服务已就绪"

# Wait for Redis to accept TCP connections instead of sleeping a fixed 10s
# and claiming readiness without checking.  Assumes the Redis container is
# reachable as host "redis" on port 6379 (mirrors the "mysql" hostname used
# above) — TODO confirm against the compose file.
echo "等待Redis服务启动..."
redis_ready=false
for attempt in {1..30}; do
    # /dev/tcp is a bash builtin redirection; `timeout` guards against a
    # filtered port making the connect hang.
    if timeout 2 bash -c 'exec 3<>/dev/tcp/redis/6379' 2>/dev/null; then
        redis_ready=true
        break
    fi
    sleep 1
done
if [ "$redis_ready" = true ]; then
    echo "Redis服务已就绪"
else
    # The original script proceeded regardless of Redis state; keep that
    # lenient behavior but make the uncertainty visible.
    echo "警告: 无法确认Redis可用，仍继续启动" >&2
fi

# Initialize the database (only in the app-services container).
if [ "$SERVICE_TYPE" = "app-services" ]; then
    echo "初始化数据库..."
    # Run the init command inside the `if` condition: with `set -e` active
    # (see top of file), the previous `python ...; if [ $? -eq 0 ]` form
    # aborted the whole script on failure, making the "continue anyway"
    # branch unreachable.
    if python scripts/init_db.py init; then
        echo "数据库初始化成功"
    else
        echo "数据库初始化失败，但继续启动服务"
    fi
fi

# Start the service(s) selected by $SERVICE_TYPE.

# Launch a command in the background and store its PID in the variable
# named by $1.  `printf -v` performs the indirect assignment without eval.
#   $1  - name of the PID variable
#   $2+ - command and its arguments
start_bg() {
    local pid_var=$1
    shift
    "$@" &
    printf -v "$pid_var" '%s' "$!"
}

# Restart a supervised process if it is no longer alive.  Keeping launch
# and relaunch in one helper stops the startup and monitor command lines
# from drifting apart (they were duplicated before).
#   $1  - name of the PID variable
#   $2  - human-readable label used in the restart log message
#   $3+ - command and its arguments
ensure_alive() {
    local pid_var=$1 label=$2
    shift 2
    if ! kill -0 "${!pid_var}" 2>/dev/null; then
        echo "${label}异常退出，重启中..."
        start_bg "$pid_var" "$@"
    fi
}

case "$SERVICE_TYPE" in
    "app-services")
        echo "启动应用服务容器（Web + Celery服务API + 执行器API）..."

        echo "启动Web服务..."
        start_bg WEB_PID python -m services.web.app

        echo "启动Celery服务API..."
        start_bg CELERY_API_PID python -m services.celery_service

        echo "启动执行器API..."
        start_bg EXECUTOR_API_PID python -m services.executor.dependency_api

        # Give the services a moment to come up before declaring success.
        sleep 5
        echo "所有应用服务已启动"

        # Supervise: every 10s, relaunch anything that has exited.
        while true; do
            ensure_alive WEB_PID "Web服务" python -m services.web.app
            ensure_alive CELERY_API_PID "Celery服务API" python -m services.celery_service
            ensure_alive EXECUTOR_API_PID "执行器API" python -m services.executor.dependency_api
            sleep 10
        done
        ;;
    "celery-workers")
        echo "启动Celery工作容器（Beat + Scheduler Worker + Executor Worker + Dependency API）..."

        # Extra database connectivity check.  Under `set -e` a failure here
        # aborts startup — intentional for the worker container.
        echo "验证数据库连接..."
        python -c "from services.shared.db_pool import get_db_session; from sqlalchemy import text; session = get_db_session(); session.execute(text('SELECT 1')); print('数据库连接验证成功')"

        echo "启动依赖管理API..."
        start_bg DEPENDENCY_API_PID python -m services.executor.dependency_api

        echo "启动Celery Beat..."
        start_bg BEAT_PID celery -A services.shared.celery_app beat --loglevel=info

        echo "启动Celery调度器Worker..."
        start_bg SCHEDULER_PID celery -A services.scheduler.tasks worker --loglevel=info --concurrency=2 --queues=scheduler -n scheduler@%h

        echo "启动Celery执行器Worker..."
        start_bg EXECUTOR_PID celery -A services.executor.tasks worker --loglevel=info --concurrency=20 --queues=executor -n executor@%h

        # Give the workers a moment to come up before declaring success.
        sleep 5
        echo "所有Celery工作进程已启动"

        # Supervise: every 10s, relaunch anything that has exited.
        while true; do
            ensure_alive DEPENDENCY_API_PID "依赖管理API" python -m services.executor.dependency_api
            ensure_alive BEAT_PID "Celery Beat" celery -A services.shared.celery_app beat --loglevel=info
            ensure_alive SCHEDULER_PID "调度器Worker" celery -A services.scheduler.tasks worker --loglevel=info --concurrency=2 --queues=scheduler -n scheduler@%h
            ensure_alive EXECUTOR_PID "执行器Worker" celery -A services.executor.tasks worker --loglevel=info --concurrency=20 --queues=executor -n executor@%h
            sleep 10
        done
        ;;
    # Legacy single-service types, kept for backward compatibility.
    "web")
        echo "启动Web服务..."
        exec python -m services.web.app
        ;;
    "celery-service")
        echo "启动Celery服务..."
        python -m services.celery_service &
        exec celery -A services.celery_service worker --loglevel=info --concurrency=4 --queues=executor
        ;;
    "celery-scheduler")
        echo "启动Celery调度器..."
        python -c "from services.shared.db_pool import get_db_session; from sqlalchemy import text; session = get_db_session(); session.execute(text('SELECT 1')); print('数据库连接验证成功')"
        exec celery -A services.scheduler.tasks worker --loglevel=info --concurrency=2 --queues=scheduler
        ;;
    "celery-beat")
        echo "启动Celery Beat..."
        exec celery -A services.shared.celery_app beat --loglevel=info
        ;;
    "celery-executor")
        echo "启动Celery执行器..."
        python -c "from services.shared.db_pool import get_db_session; from sqlalchemy import text; session = get_db_session(); session.execute(text('SELECT 1')); print('数据库连接验证成功')"
        exec celery -A services.executor.tasks worker --loglevel=info --concurrency=20 --queues=executor
        ;;
    "executor-api")
        echo "启动执行器API..."
        exec python -m services.executor.dependency_api
        ;;
    *)
        echo "未知的服务类型: $SERVICE_TYPE"
        exit 1
        ;;
esac