# pylint:disable=I1101:c-extension-no-member
# pylint:disable=E1101:no-member
# pylint:disable=C0301:line-too-long
"""perf blaster"""

from os.path import join
import threading

from http import HTTPStatus
import time
import json
import argparse

import psutil
import httpx
from bottle import Bottle, run, request, response
from paste import httpserver

from groot2.cloud.base.http_request.debugger import http_debug

from lib.collector import zenki_collect_and_report_locust_cases, collect_locust_files
from lib.exception import CustomException
from lib.jtl_manager import machine_info_report
from lib.named_popen import execute_in_backgroud_process, get_matching_processes
from lib.network import find_available_port
from lib.env_manager import (
    git_pull,
    zenki_init_python_env,
    init_git_repo,
    kill_locust_processes,
    init_workspace_dir,
    init_logs_dir,
)
from lib.pressure_task import get_pressure_task_sig
from lib.global_share import GlobalShare, note_taker, error_note_taker
from lib.cache import (
    cache,
    cache_master_info,
    cache_worker_info,
    del_cached_locust_processes,
    del_all_cacahed_locust_processes,
    get_cached_locust_processes,
    load_python_interpreter_path,
    update_cached_locust_processes,
    get_pressure_task_cached_info,
    set_task_id_set,
    get_task_id_set,
)


# pylint:disable=W0718:broad-exception-caught

# Module-level application objects shared by every handler below.
app = Bottle()
# Default all handlers to JSON responses (handlers return dicts that
# Bottle serializes).
response.content_type = "application/json"
# Serializes access to the cached pressure-task id set (see manage_task_id_set).
task_id_set_lock = threading.Lock()


@app.route("/")
@app.route("/misc/ping")
def misc_ping():
    """探活接口"""

    return GlobalShare.SUCC_RESP


@app.route("/get_performance_metrics")
def get_performance_metrics():
    """获取性能指标数据"""

    start_time = time.perf_counter()

    net_io_start = psutil.net_io_counters()
    bytes_sent_start = net_io_start.bytes_sent
    bytes_recv_start = net_io_start.bytes_recv

    interval = 1
    cpu_usage = psutil.cpu_percent(interval=interval)
    memory_usage = psutil.virtual_memory().percent

    net_io_end = psutil.net_io_counters()
    bytes_sent_end = net_io_end.bytes_sent
    bytes_recv_end = net_io_end.bytes_recv

    download_speed = (bytes_recv_end - bytes_recv_start) / interval / 1024  # kb/s
    upload_speed = (bytes_sent_end - bytes_sent_start) / interval / 1024  # kb/s

    end_time = time.perf_counter()
    calculation_time = round(end_time - start_time, 2)

    return {
        "cpu_usage": cpu_usage,
        "memory_usage": memory_usage,
        "calculation_time": f"{calculation_time} s",
        "download_speed": f"{download_speed} kb/s",
        "upload_speed": f"{upload_speed} kb/s",
    }


@app.route("/init_and_pull_git_repo", method="POST")
def init_and_pull_git_repo():
    """初始化 git 仓库对象并拉取更新代码"""

    data = request.json
    repo_url = data.get("repo_url")

    repo_name, repo_local_path = init_git_repo(repo_url=repo_url)
    GlobalShare.SUCC_RESP["msg"] = f"{repo_name} init succ: {repo_local_path}"
    return GlobalShare.SUCC_RESP


@app.route("/init_python_env", method="POST")
def init_python_env():
    """初始化 python 环境"""

    data = request.json
    repo_path = data.get("repo_path")
    python_path = data.get("python_path") or "python"

    try:
        venv_python_path = zenki_init_python_env(
            repo_path=repo_path, python_path=python_path
        )
        return {
            "code": HTTPStatus.OK,
            "msg": f"Venv's python interpreter path: {venv_python_path}",
        }
    except CustomException as exc:
        error_note_taker.error(
            f"[init_python_env] exception: {exc}, cause: {exc.__cause__}"
        )
        return {
            "code": HTTPStatus.INTERNAL_SERVER_ERROR,
            "msg": str(exc),
        }


@app.route("/collect_and_report_locust_cases/<repo_name>/<env_name>")
def collect_and_report_locust_cases(repo_name: str, env_name: str):
    """收集并上报 pressure 项目中的 locust 用例"""

    zenki_collect_and_report_locust_cases(repo_name=repo_name, env_name=env_name)
    return GlobalShare.SUCC_RESP


@app.route("/get_locust_files/<repo_name>")
def get_locust_files(repo_name):
    """获取仓库中的 locust 脚本列表"""

    repo_local_path = join(GlobalShare.WORKSPACE_DIR, repo_name)
    locust_files = collect_locust_files(directory_path=repo_local_path)
    return {
        "repo_name": repo_name,
        "repo_local_path": repo_local_path,
        "locust_files": locust_files,
    }


@app.route("/start_locust_master", method="POST")
def start_locust_master():
    """启动 locust master 节点服务"""

    try:
        data = request.json
    except json.decoder.JSONDecodeError as exc:
        return {"code": HTTPStatus.BAD_REQUEST, "exception": str(exc)}

    pressure_task_id = data.get("pressure_task_id")
    repo_name = data.get("repo_name")
    python_path = data.get("python_path") or load_python_interpreter_path(
        project_name=repo_name
    )
    if not python_path:
        return {
            "code": HTTPStatus.BAD_REQUEST,
            "msg": f"python interpreter path not found for {repo_name}",
        }
    locust_file_rel_path = data.get("locust_file_rel_path")
    master_bind_port = data.get("master_bind_port") or find_available_port()
    web_port = data.get("web_port") or find_available_port(
        exclude_port=master_bind_port
    )

    locust_file_path = join(GlobalShare.WORKSPACE_DIR, repo_name, locust_file_rel_path)
    pressure_task_sig = get_pressure_task_sig(pressure_task_id=pressure_task_id)
    log_file_path = join(GlobalShare.LOGS_DIR, f"{pressure_task_sig}_master.log")

    pressure_task_info = get_pressure_task_cached_info(
        pressure_task_id=pressure_task_id
    )
    if pressure_task_info["master"]:
        return {
            "code": HTTPStatus.BAD_REQUEST,
            "msg": f"pressure task {pressure_task_id} master process existed",
        }

    command = (
        f"nohup {python_path} -m locust -f {locust_file_path} --master "
        + f"--web-host={GlobalShare.AGENT_IP} --web-port={web_port} "
        + f"--master-bind-host={GlobalShare.AGENT_IP} --master-bind-port={master_bind_port} "
        + f"> {log_file_path} 2>&1 &"
    )
    locust_master_process = execute_in_backgroud_process(command=command)

    info = {
        "temp_pid": locust_master_process.pid,
        "web_host": GlobalShare.AGENT_IP,
        "web_port": web_port,
        "web_address": f"http://{GlobalShare.AGENT_IP}:{web_port}",
        "master_bind_host": GlobalShare.AGENT_IP,
        "master_bind_port": master_bind_port,
        "features": ["locust", "--master", f"--master-bind-port={master_bind_port}"],
        "log_file_path": log_file_path,
        "command": command,
    }

    note_taker.info(f"[start_locust_master] start to cache master info: {info}")
    cache_master_info(pressure_task_id=pressure_task_id, info=info)
    manage_task_id_set("add", pressure_task_id)

    return {**GlobalShare.SUCC_RESP, **info}


@app.route("/start_locust_worker", method="POST")
def start_locust_worker():
    """启动 locust worker 节点服务"""

    try:
        data = request.json
    except json.decoder.JSONDecodeError as exc:
        return {"code": HTTPStatus.BAD_REQUEST, "exception": str(exc)}

    pressure_task_id = data.get("pressure_task_id")
    repo_name = data.get("repo_name")
    python_path = data.get("python_path") or load_python_interpreter_path(
        project_name=repo_name
    )
    if not python_path:
        return {
            "code": HTTPStatus.INTERNAL_SERVER_ERROR,
            "msg": f"python interpreter path not found for {repo_name}",
        }
    locust_file_rel_path = data.get("locust_file_rel_path")
    master_host = data.get("master_host")
    master_port = data.get("master_port")

    pressure_task_sig = get_pressure_task_sig(pressure_task_id=pressure_task_id)
    log_file_path = join(GlobalShare.LOGS_DIR, f"{pressure_task_sig}_worker.log")
    locust_file_path = join(GlobalShare.WORKSPACE_DIR, repo_name, locust_file_rel_path)
    command = (
        f"nohup {python_path} -m locust -f {locust_file_path} --worker "
        + f"--master-host={master_host} --master-port={master_port} "
        + f"> {log_file_path} 2>&1 &"
    )

    locust_worker_process = execute_in_backgroud_process(command=command)
    temp_pid = locust_worker_process.pid
    info = {
        "command": command,
        "features": ["locust", "--worker", f"--master-port={master_port}"],
        "log_file_path": log_file_path,
        "temp_pid": temp_pid,
    }

    note_taker.info(f"[start_locust_master] start to cache worker info: {info}")
    cache_worker_info(pressure_task_id=pressure_task_id, info=info)

    return {**GlobalShare.SUCC_RESP, **info}


@app.route("/kill_pressure_task_processes", method="POST")
def kill_pressure_task_processes():
    """杀掉压测任务进程"""

    try:
        data = request.json
    except json.decoder.JSONDecodeError as exc:
        return {"code": HTTPStatus.BAD_REQUEST, "exception": str(exc)}

    pressure_task_id = data.get("pressure_task_id")
    option: dict = data.get("option")
    if not option:
        kill_master = True
        kill_workers = True
        kill_workers_count = -1
    else:
        if option["kill_master"] == 1:
            kill_master = True
        if option["kill_workers"] == 1:
            kill_workers = True
            kill_workers_count = option["kill_workers_count"]

    note_taker.info(
        f"[kill_pressure_task_processes] pressure_task_id: {pressure_task_id}, option: {option}, kill_master: {kill_master}, kill_workers: {kill_workers}, kill_workers_count: {kill_workers_count}"
    )
    pressure_task_info = get_pressure_task_cached_info(
        pressure_task_id=pressure_task_id
    )
    if not pressure_task_info:
        return {
            "code": HTTPStatus.NOT_FOUND,
            "msg": f"no task processes found with task id: {pressure_task_id}",
        }

    stopped_pids = {"master": [], "workers": []}

    workers_all_killed = False

    if kill_master:
        try:
            master_info = pressure_task_info["master"]
            note_taker.info(
                f"[kill_pressure_task_processes] master info: {master_info}"
            )
            features = master_info["features"]

            processes = get_matching_processes(features=features)
            for process_info in processes:
                pid = process_info["pid"]
                try:
                    process = psutil.Process(pid)
                    process.kill()
                    stopped_pids["master"].append(pid)
                except psutil.NoSuchProcess:
                    error_note_taker.error(
                        f"[kill_pressure_task_processes] process {pid} does not exist"
                    )
                except Exception as exc:
                    error_note_taker.error(
                        f"[kill_pressure_task_processes] kill master process {pid} error: {exc}"
                    )
                    return {
                        "code": HTTPStatus.INTERNAL_SERVER_ERROR,
                        "msg": f"[kill_pressure_task_processes] kill master process {pid} error: {exc}",
                    }

            update_cached_locust_processes(
                pressure_task_id=pressure_task_id,
                node_name="master",
                node_info={},
            )
        except Exception as exc:
            error_note_taker.error(
                f"[kill_pressure_task_processes] kill master process error: {exc}"
            )
            return {
                "code": HTTPStatus.INTERNAL_SERVER_ERROR,
                "msg": f"[kill_pressure_task_processes] kill master processes error: {exc}",
            }

    if kill_workers:
        try:
            killed_workers_count = 0

            workers_info = pressure_task_info["workers"]
            workers_count = len(workers_info)
            remained_workers_info = []
            for worker_info in workers_info:
                features = worker_info["features"]
                if kill_workers_count < 0 or killed_workers_count < kill_workers_count:
                    processes = get_matching_processes(features=features)

                    for process_info in processes:
                        pid = process_info["pid"]
                        try:
                            process = psutil.Process(pid)
                            process.kill()
                            stopped_pids["workers"].append(pid)
                        except psutil.NoSuchProcess:
                            error_note_taker.error(
                                f"[kill_pressure_task_processes] process {pid} does not exist"
                            )
                        except Exception as exc:
                            error_note_taker.error(
                                f"[kill_pressure_task_processes] kill worker process {pid} error: {exc}"
                            )
                            return {
                                "code": HTTPStatus.INTERNAL_SERVER_ERROR,
                                "msg": f"[kill_pressure_task_processes] kill worker process {pid} error: {exc}",
                            }

                    killed_workers_count += 1
                else:
                    remained_workers_info.append(worker_info)

            update_cached_locust_processes(
                pressure_task_id=pressure_task_id,
                node_name="workers",
                node_info=remained_workers_info,
            )
            workers_all_killed = workers_count == killed_workers_count
        except Exception as exc:
            error_note_taker.error(f"[kill_pressure_task_processes] exception: {exc}")
            return {
                "code": HTTPStatus.INTERNAL_SERVER_ERROR,
                "msg": f"kill workers processes error: {exc}",
            }

    if workers_all_killed:
        note_taker.info(
            f"[kill_pressure_task_processes] start to discard pressure task processes cache: {pressure_task_id}"
        )
        try:
            del_cached_locust_processes(pressure_task_id=pressure_task_id)
            manage_task_id_set(option="discard", task_id=pressure_task_id)
            note_taker.info(
                f"[kill_pressure_task_processes] pressure task processes cache discarded: {pressure_task_id}"
            )
        except Exception as exc:
            error_note_taker.error(
                f"[kill_pressure_task_processes] delete pressure task cache error: {exc}"
            )
            return {
                "code": HTTPStatus.INTERNAL_SERVER_ERROR,
                "msg": f"delete pressure task cache error: {exc}",
            }

    if stopped_pids["master"] or stopped_pids["workers"]:
        note_taker.info(
            f"[kill_pressure_task_processes] stop task processes: {stopped_pids}"
        )
        return {
            **GlobalShare.SUCC_RESP,
            "stopped_pids": stopped_pids,
        }

    return {
        "code": HTTPStatus.NOT_FOUND,
        "msg": f"no pids stopped with task id: {pressure_task_id}",
    }


def manage_task_id_set(option: str, task_id: str):
    """Mutate the shared pressure-task id set under the module lock.

    Args:
        option (str): "add" or "discard"; anything else is a no-op.
        task_id (str): task id to add to / remove from the set.
    """
    # `with` guarantees release even if the cache helpers raise — same
    # semantics as the original acquire/try/finally, written idiomatically.
    with task_id_set_lock:
        if option == "add":
            data_set: set = get_task_id_set()
            data_set.add(task_id)
            set_task_id_set(data_set=data_set)
            note_taker.info(f"[manage_task_id_set] add task id to set: {task_id}")
        elif option == "discard":
            data_set: set = get_task_id_set()
            data_set.discard(task_id)
            set_task_id_set(data_set=data_set)
            note_taker.info(f"[manage_task_id_set] discard task id from set: {task_id}")


@app.route("/get_pressure_tasks")
def get_pressure_tasks():
    """获取节点上的压测任务列表"""

    task_id_set: set = get_task_id_set()
    return {
        **GlobalShare.SUCC_RESP,
        "task_id_list": list(task_id_set),
    }


@app.route("/kill_all_locust_processes")
def kill_all_locust_processes():
    """杀掉所有 locust 进程"""

    killed_pids = kill_locust_processes()
    return {
        **GlobalShare.SUCC_RESP,
        "killed_locust_pids": killed_pids,
    }


@app.route("/clear_cache")
def clear_cache():
    """清理缓存"""

    cache.clear()
    return GlobalShare.SUCC_RESP


@app.route("/get_cache_key/<key>")
def get_cache(key):
    """获取缓存的 key 值"""

    locust_processes = cache.get(key=key, default={})
    return {
        **GlobalShare.SUCC_RESP,
        "locust_processes": locust_processes,
    }


@app.route("/pull_repo/<repo_name>")
def pull_repo(repo_name: str):
    """收集 locust 文件信息"""

    repo_local_path, repo_git_url = git_pull(repo_name=repo_name)
    return {
        **GlobalShare.SUCC_RESP,
        "repo_local_path": repo_local_path,
        "repo_git_url": repo_git_url,
    }


@app.route("/get_cached_pressure_task_info")
def get_cached_pressure_task_info():
    """获取缓存的压测任务信息"""

    return {
        **GlobalShare.SUCC_RESP,
        "cached_locust_processes": get_cached_locust_processes(),
    }


@app.route("/locust/exceptions/<pressure_task_id>")
def get_exceptions(pressure_task_id):
    """locust web 接口封装: 获取异常信息"""

    pressure_task_info = get_pressure_task_cached_info(
        pressure_task_id=pressure_task_id
    )
    if not pressure_task_info:
        return {
            "code": HTTPStatus.BAD_REQUEST,
            "msg": f"Pressure task not found with id: {pressure_task_id}",
        }
    web_port = pressure_task_info["master"]["web_port"]
    url = f"http://{GlobalShare.AGENT_IP}:{web_port}/exceptions"
    return httpx.get(url=url).json()


@app.route("/locust/stats/requests/<pressure_task_id>")
def get_stats_requests(pressure_task_id):
    """locust web 接口封装: 获取请求实时状态"""

    pressure_task_info = get_pressure_task_cached_info(
        pressure_task_id=pressure_task_id
    )
    if not pressure_task_info:
        return {
            "code": HTTPStatus.BAD_REQUEST,
            "msg": f"Pressure task not found with id: {pressure_task_id}",
        }
    web_port = pressure_task_info["master"]["web_port"]
    url = f"http://{GlobalShare.AGENT_IP}:{web_port}/stats/requests"
    return httpx.get(url=url).json()


@app.route("/locust/swarm", method="POST")
def swarm():
    """locust web 接口封装: 编辑并启动任务"""

    try:
        data = request.json
    except json.decoder.JSONDecodeError as exc:
        return {"code": HTTPStatus.BAD_REQUEST, "exception": str(exc)}
    note_taker.info(f"[swarm] receive request info {data}")
    pressure_task_id = data["pressure_task_id"]
    host = data["host"]
    user_count = data["user_count"]
    spawn_rate = data["spawn_rate"]

    pressure_task_info = get_pressure_task_cached_info(
        pressure_task_id=pressure_task_id
    )
    note_taker.info(f"[swarm] pressure_task_info: {pressure_task_info}")
    if not pressure_task_info:
        return {
            "code": HTTPStatus.INTERNAL_SERVER_ERROR,
            "msg": f"Pressure task not found with id: {pressure_task_id}",
        }
    if "master" not in pressure_task_info:
        return {
            "code": HTTPStatus.INTERNAL_SERVER_ERROR,
            "msg": f"Master server info not found with task id: {pressure_task_id}",
        }
    try:
        web_port = pressure_task_info["master"]["web_port"]
        url = f"http://{GlobalShare.AGENT_IP}:{web_port}/swarm"
        data = {"host": host, "user_count": user_count, "spawn_rate": spawn_rate}
        ret = httpx.post(
            url=url,
            data=data,
        )
        http_debug(url=url, data=data, ret=ret.text)
        json_r = ret.json()
        return json_r
    except Exception as exc:
        error_note_taker.error(
            f"[swarm] request error: {exc}\nurl: {url}\ndata:{data}\nret:{ret}"
        )
        raise exc


@app.route("/locust/stop/<pressure_task_id>")
def stop_pressure_test(pressure_task_id):
    """locust web 接口封装: 停止压测任务"""

    pressure_task_info = get_pressure_task_cached_info(
        pressure_task_id=pressure_task_id
    )
    if not pressure_task_info:
        return {
            "code": HTTPStatus.BAD_REQUEST,
            "msg": f"Pressure task not found with id: {pressure_task_id}",
        }
    web_port = pressure_task_info["master"]["web_port"]
    url = f"http://{GlobalShare.AGENT_IP}:{web_port}/stop"
    return httpx.get(url=url).json()


if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Perf blaster agent starter")
    parser.add_argument("-m", "--mode", type=int, help="operation mode", default=0)
    parser.add_argument(
        "-r", "--repo_name", type=str, help="repository name", default="pressure"
    )
    parser.add_argument("-e", "--env_name", type=str, help="env name", default="dev")
    args = parser.parse_args()

    del_all_cacahed_locust_processes()
    machine_info_report(env_name="dev")
    init_workspace_dir()
    init_logs_dir()
    for _, the_repo_url in GlobalShare.REPOS.items():
        init_git_repo(repo_url=the_repo_url)

    if args.mode == 0:
        # 正式模式
        note_taker.info("[main] run in formal mode")
        httpserver.serve(
            app,
            host="0.0.0.0",
            port=GlobalShare.AGENT_PORT,
        )
    elif args.mode == 1:
        # 调试模式
        note_taker.info("[main] run in debug mode")
        run(app, host="0.0.0.0", port=GlobalShare.AGENT_PORT, debug=True, reloader=True)
    elif args.mode == 2:
        # 收集上报用例模式
        note_taker.info("[main] run in collect mode")
        zenki_collect_and_report_locust_cases(
            repo_name=args.repo_name, env_name=args.env_name
        )
