import os
import subprocess
import sys
import time

# --- Constants ---
WORK_DIR = "/root/autodl-tmp/rope"  # working directory for all services
REDIS_LOG = os.path.join(WORK_DIR, "redis.log")
FASTAPI_LOG = os.path.join(WORK_DIR, "fastapi.log")
CELERY_LOG = os.path.join(WORK_DIR, "celery.log")  # Celery worker log file
# Make sure every log file exists; create an empty one when missing.
for log_path in [REDIS_LOG, FASTAPI_LOG, CELERY_LOG]:
    if not os.path.exists(log_path):
        with open(log_path, 'w') as f:
            pass

# FASTAPI_CMD_PATTERN = "python.*run.py"
FASTAPI_CMD_PATTERN = "gunicorn.*run:app"  # presumably for a process-pattern check; currently only the port check is used — verify
MAX_LOG_SIZE = 1 * 1024 * 1024  # 1 MB — logs larger than this get trimmed by trim_log()

def start_celery_in_background(proc_count):
    """Start a Celery worker in the background unless one is already running.

    Args:
        proc_count: number of worker processes (celery -c value).
    """
    # BUG FIX: the old pattern "celery worker" is a literal substring that
    # never matches the real command line "celery -A celerytask worker ...",
    # so a duplicate worker was spawned on every run. Use a regex instead.
    if is_process_running("celery.*worker"):
        print("Celery is already running!")
        return
    print(f"Starting Celery with {proc_count} processes in the background...")
    trim_log(CELERY_LOG)
    cmd = ["celery", "-A", "celerytask", "worker", "--loglevel=info",
           "-c", str(proc_count),
           # restart each child after 200,000 tasks to curb memory leaks
           "--max-tasks-per-child", "200000"]
    with open(CELERY_LOG, 'a') as f:
        subprocess.Popen(cmd, stdout=f, stderr=f)
    # Print before sleeping so the message matches what is happening.
    print("暂停几秒，等待Celery初始化完毕")
    time.sleep(5)

# Utility functions
def is_process_running(process_pattern):
    """Return True if `ps aux` lists a process whose line matches *process_pattern*."""
    shell_cmd = f"ps aux | grep '{process_pattern}' | grep -v grep"
    try:
        listing = subprocess.check_output(shell_cmd, shell=True)
    except subprocess.CalledProcessError:
        # grep exits non-zero when nothing matches
        return False
    return bool(listing.decode().strip())


def trim_log(log_file):
    """Shrink *log_file* in place to at most MAX_LOG_SIZE bytes.

    Keeps the tail of the file and drops the first (possibly partial) line
    so the result starts on a line boundary. No-op when the file is missing
    or already small enough.
    """
    if not os.path.exists(log_file) or os.path.getsize(log_file) <= MAX_LOG_SIZE:
        return
    with open(log_file, 'rb') as src:
        src.seek(-MAX_LOG_SIZE, os.SEEK_END)  # jump to the last MAX_LOG_SIZE bytes
        src.readline()  # skip the first, likely incomplete, line
        tail = src.read()
    # BUG FIX: write the tail back in binary mode. The old decode()/text-write
    # round trip raised UnicodeDecodeError on logs containing non-UTF-8 bytes
    # and depended on the platform default encoding.
    with open(log_file, 'wb') as dst:
        dst.write(tail)


# Main service functions
def start_redis():
    """Launch a daemonized redis-server unless one is already running."""
    if is_process_running("redis-server"):
        print("Redis server is already running!")
        return
    print("Starting Redis server...")
    trim_log(REDIS_LOG)
    redis_cmd = ["redis-server", "--daemonize", "yes"]
    with open(REDIS_LOG, 'a') as log_fh:
        subprocess.Popen(redis_cmd, stdout=log_fh, stderr=log_fh)



def is_process_running_on_port(port):
    """Return True if lsof reports some process using TCP/UDP *port*."""
    try:
        result = subprocess.check_output(f"lsof -i :{port}", shell=True)
    except subprocess.CalledProcessError:
        # lsof exits non-zero when no process uses the port
        return False
    return len(result.decode().strip()) > 0

def start_fastapi(port=6006):
    """Start the FastAPI app under gunicorn in the background.

    Does nothing when something is already listening on *port*.

    Args:
        port: TCP port to bind; defaults to 6006 (the original hard-coded value).
    """
    if is_process_running_on_port(port):
        print("FastAPI server is already running!")
        return
    print("Starting FastAPI server...")
    # gunicorn resolves the "run:app" module path relative to the CWD,
    # so switch to the project directory first.
    os.chdir(WORK_DIR)
    trim_log(FASTAPI_LOG)
    cmd = ["gunicorn", "run:app",
           "-w", "10",                             # 10 worker processes
           "-k", "uvicorn.workers.UvicornWorker",  # async workers for FastAPI
           "-b", f"0.0.0.0:{port}"]
    with open(FASTAPI_LOG, 'a') as f:
        # Popen (not run) so the server starts without blocking this script.
        subprocess.Popen(cmd, stdout=f, stderr=f)


def start_celery(proc_count):
    """Run a Celery worker in the foreground (blocks until the worker exits)."""
    print(f"Starting Celery with {proc_count} processes...")
    worker_cmd = [
        "celery", "-A", "celerytask", "worker",
        "--loglevel=info",
        "-c", str(proc_count),
        # each child process is recycled after 200,000 tasks
        "--max-tasks-per-child", "200000",
    ]
    subprocess.run(worker_cmd)


# Main entry point
if __name__ == "__main__":
    start_redis()

    proc_count = 8  # default number of Celery worker processes

    # Optional "-c N" command-line argument overrides the worker count.
    if "-c" in sys.argv:
        idx = sys.argv.index("-c")
        if idx + 1 < len(sys.argv) and sys.argv[idx + 1].isdigit():
            proc_count = int(sys.argv[idx + 1])
    # BUG FIX: Celery was previously started only inside the "-c" branch,
    # which left the default proc_count dead code and meant no worker was
    # launched when the flag was omitted. Start it unconditionally.
    start_celery_in_background(proc_count=proc_count)
    start_fastapi()
