import json
import logging
import shlex
import subprocess
from logging.handlers import TimedRotatingFileHandler

from fastapi import FastAPI, HTTPException, BackgroundTasks

# Root-logger configuration: rotate the server log once per day and keep
# one week of history.  Applied to the root logger so that every module's
# log records end up in the same rotating file.
handler = TimedRotatingFileHandler(
    "spark_server.log",
    when="D",
    backupCount=7)
handler.setFormatter(logging.Formatter('%(asctime)s - %(levelname)s - %(message)s'))

root_logger = logging.getLogger()
root_logger.addHandler(handler)
root_logger.setLevel(logging.INFO)

app = FastAPI()


@app.get("/")
async def root():
    """Health-check endpoint: respond with a static greeting."""
    greeting = {"message": "Hello World"}
    return greeting


@app.get("/hello/{name}")
async def say_hello(name: str):
    """Greet the caller by the name captured from the URL path."""
    return {"message": "Hello " + name}

# Third-party jars shipped with every submitted job, formatted as the
# comma-separated list that spark-submit's --jars option expects.
# Built with join() so there is no trailing comma: the original literal
# ended in "," which handed spark-submit an empty jar entry.
JARS = ",".join([
    "/data/spark/spark-3.5.3-bin-hadoop3/spark_jars/fastjson2-2.0.17.jar",
    "/data/spark/spark-3.5.3-bin-hadoop3/spark_jars/spark-doris-connector-3.4_2.12-1.3.2.jar",
    "/data/spark/spark-3.5.3-bin-hadoop3/spark_jars/joda-time-2.10.14.jar",
    "/data/spark/spark-3.5.3-bin-hadoop3/spark_jars/mysql-connector-java-8.0.13.jar",
])


# Log the command's output once the child process has finished.
def exeLogs(process: subprocess.Popen) -> None:
    """Wait for *process* to exit and log its stdout/stderr line by line.

    Uses ``communicate()`` to drain both pipes concurrently.  The previous
    implementation read stdout to EOF before touching stderr, which can
    deadlock: a child that fills the stderr OS pipe buffer blocks on write
    while we are still blocked reading stdout.

    :param process: a ``Popen`` created with ``stdout=PIPE``, ``stderr=PIPE``
        and ``text=True``.
    """
    stdout, stderr = process.communicate()
    for line in (stdout or "").splitlines():
        if line:
            logging.info(f"stdout: {line.strip()}")
    for line in (stderr or "").splitlines():
        if line:
            logging.info(f"stderr: {line.strip()}")
    logging.info(f"命令执行完成， pid: {process.pid}，return code返回码: {process.returncode}, poll返回码: {process.poll()}")
    logging.info("-" * 100 + "\n")


@app.post("/spark/submit/")
async def submit(json_data: dict, tasks: BackgroundTasks):
    """Submit a Spark job via spark-submit and log its output in the background.

    The request body (arbitrary JSON object) is serialized and passed verbatim
    as the last program argument of the Spark application jar.

    :param json_data: job parameters forwarded to the Spark main class.
    :param tasks: FastAPI background-task queue used to drain the child's
        stdout/stderr after the response is sent.
    :returns: dict with ``code``, ``pid``, ``poll`` (None while running) and a
        human-readable message.
    :raises HTTPException: 500 when launching the process fails.
    """
    logging.info(f"接收到的提交Spark任务的JSON参数: {json_data}")

    try:
        # Serialize the request body so it can travel as a single argv entry.
        json_str = json.dumps(json_data)

        # Build the spark-submit argument vector.  Each flag and its value are
        # separate list items and the process is started with shell=False, so
        # there is no shell to inject into and no quoting (shlex.quote) needed.
        # The original joined these into one string and ran it with shell=True.
        command = [
            "/data/spark/spark-3.5.3-bin-hadoop3/bin/spark-submit",
            "--class", "com.swkj.wjk.Main",                      # Spark application main class
            "--master", "spark://192.168.10.90:60061",           # cluster manager (standalone)
            "--deploy-mode", "cluster",                          # cluster or client
            "--driver-memory", "4G",
            "--executor-memory", "4G",
            "--jars", JARS,                                      # third-party jars
            "--total-executor-cores", "2",                       # standalone and Mesos only
            # "--executor-cores", "2",   # YARN and Kubernetes only
            # "--num-executors", "2",    # YARN and Kubernetes only
            "/data/spark/spark-3.5.3-bin-hadoop3/spark_jars/spark_dataview-1.0-SNAPSHOT.jar",
            json_str,                                            # program argument
        ]

        # Log a copy-pasteable rendering of the command (quoting for display only).
        cmd = " \\\n\t".join(shlex.quote(part) for part in command)
        logging.info(f"构建的spark-submit命令:\n{cmd}\n")

        # Launch spark-submit without a shell; pipes are drained by exeLogs.
        process = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)

        tasks.add_task(exeLogs, process)

        result = {
            "code": 0,
            "pid": process.pid,
            # poll(): None while running, otherwise the exit code.
            "poll": process.poll(),
            "message": "Spark任务已提交",
        }

        logging.info(f"Spark任务提交结果: {result}")
        return result

    except Exception as e:
        logging.error(f"提交Spark任务出现异常: {str(e)}")
        raise HTTPException(status_code=500, detail=f"提交Spark任务出现异常: {str(e)}")


if __name__ == "__main__":
    import uvicorn

    # Serve the FastAPI app on all interfaces at port 60100.
    uvicorn.run(app, host="0.0.0.0", port=60100)
