import logging
import os

from mongoTools import clean_mongo
from seleniumtools import browserTool
from sqlTools import danji_mysql_tools, lianji_mysql_tools
from sqlTools.danji_mysql_tools import clean_ob_nrlj, ob_nrlj_db, recreate_db_airflow
from sshtools.SshTool import SshTool
from tools.configmanager import config
from tools.file_tool import FileTool

def _connect(section):
    """Build an SshTool for the host configured under *section* ('dan_ji' or 'lian_ji')."""
    return SshTool(
        ssh_ip=config.get(f"{section}.ip"),
        ssh_user=config.get(f"{section}.user"),
        ssh_pwd=config.get(f"{section}.pwd"),
    )


# Shared SSH sessions to the standalone (danji) and clustered (lianji) hosts.
danji_ssh = _connect('dan_ji')
lianji_ssh = _connect('lian_ji')


def danji_pick_mysql_ibd(ssh):
    """
    Defragment the largest tables in neirongdb to reclaim disk space
    after mass row deletions.

    :param ssh: SSH session to the danji host (SshTool-like, exposing run_command).
    """
    # tail -n +2: drop the `ls -l` header line; head -n 10: keep only the 10 largest files.
    listing_cmd = (
        "ls -lhS /root/nrlj/mysql/data/neirongdb"
        " | tail -n +2 | head -n 10 | awk '{print $9}'"
    )
    output = ssh.run_command(listing_cmd)
    for entry in output[1]:
        # Log the file's size before (possibly) optimizing it.
        ssh.run_command(f"ls -lh /root/nrlj/mysql/data/neirongdb/{entry}")
        table_name = entry.replace(".ibd\n", "")
        # Dictionary tables are skipped; everything else gets OPTIMIZE TABLE.
        if "_dict_" not in table_name:
            danji_mysql_tools.optimize_table(table_name=table_name)


def lianji_pick_mysql_ibd(ssh, db, db_instance):
    """
    Defragment the largest tables of one lianji MySQL database to reclaim
    disk space after mass row deletions.

    :param ssh: SSH session to the lianji host (SshTool-like, exposing run_command).
    :param db: database descriptor dict with a "dbname" key,
               e.g. lianji_mysql_tools.nrljdevdb or lianji_mysql_tools.ljrhdb.
    :param db_instance: connection/instance handle forwarded to optimize_table.
    """
    dbname = db.get("dbname")
    # tail -n +2: drop the `ls -l` header line; head -n 10: keep only the 10 largest files.
    cmd = "ls -lhS /root/nrljlj/mysql/data/" + dbname + " | tail -n +2 | head -n 10 | awk '{print $9}'"
    logging.info(cmd)
    result = ssh.run_command(cmd)
    for table in result[1]:
        # Hoisting `dbname` avoids the quote-reuse f-string form
        # f"...{db.get("dbname")}..." which is a SyntaxError before
        # Python 3.12 (PEP 701 only legalized it in 3.12).
        ssh.run_command(f"ls -lh /root/nrljlj/mysql/data/{dbname}/{table}")
        table_name = table.replace(".ibd\n", "")
        lianji_mysql_tools.optimize_table(table_name=table_name, db=db, db_instance=db_instance)


# Wipe all danji data so danji re-sends everything (neo4j / kafka are currently cleaned via the browser!)
def clean_danji_all_and(ssh_client=danji_ssh):
    """
    Stop the danji stack, wipe its file and MySQL data, then restart the
    services so BCP processing can be re-run from scratch.

    :param ssh_client: SSH session to the danji host (defaults to module-level danji_ssh).
    """
    logging.info("清理danji所有数据")
    logging.info("(1)停止danji服务，清理文件数据:")
    # Resolve <repo-root>/shellscripts/clean_all_danji.sh relative to this file.
    current_dir = os.path.dirname(os.path.abspath(__file__))
    root_dir = os.path.dirname(current_dir)
    target_dir = os.path.join(root_dir, "shellscripts")
    target_file = os.path.join(target_dir, "clean_all_danji.sh")
    if not os.path.exists(target_file):
        logging.error(f"{target_file}不存在！")
        return
    clean_all_danji = FileTool.read_bash_file(target_file)
    ssh_client.run_command(clean_all_danji)
    # Fixed log typo: was "msyql".
    logging.info("(2)清理mysql")
    danji_mysql_tools.mysql_drop_split()
    logging.info("mysql_已drop分表")
    danji_mysql_tools.mysql_script(sql_file="sql/clean_neirongdb.sql")
    logging.info("mysql_已清理 neirongdb")
    # Purge the binlogs.
    danji_mysql_tools.mysql_clean_binlog()
    # Defragment the largest .ibd files.
    danji_pick_mysql_ibd(ssh_client)
    logging.info("mysql_已清理binlog")
    logging.info("(3)重新启动服务")
    # Bring the services back up so BCP processing can start.
    run_danji = """
    cd /root/nrlj;
    docker compose up -d;"""
    ssh_client.run_command(run_danji)
    logging.info("danji所有数据清理完毕，请把zip放入用户名下的bcp路径，重新跑数据！")


# Clean all data of danji 2.0:
#  - stop the nrlj_web service
#  - delete the airflow folder under nrlj_web
#  - back up the bcp package
#  - delete everything under /data/bcp
#  - OceanBase, nrlj tenant: truncate the tables prefixed dj_ / lj_
#  - OceanBase, airflow tenant: drop all tables of the airflow database
def clean_danji2_all_and(ssh_client=danji_ssh):
    """
    Stop the danji 2.0 stack, wipe its file data and OceanBase tenants,
    then restart the services.

    :param ssh_client: SSH session to the danji host (defaults to module-level danji_ssh).
    """
    logging.info("清理danji所有数据")
    logging.info("(1)停止danji后端，停止模型，清理文件数据:")
    # Locate <repo-root>/shellscripts/clean_all_danji2.sh relative to this file.
    repo_root = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
    target_file = os.path.join(repo_root, "shellscripts", "clean_all_danji2.sh")
    if not os.path.exists(target_file):
        logging.error(f"{target_file}不存在！")
        return
    cleanup_script = FileTool.read_bash_file(file_path=target_file)
    ssh_client.run_command(cleanup_script)
    logging.info("(2)清理OceanBase-nrlj租户")
    clean_ob_nrlj()
    logging.info("OceanBase-airflow租户，删除airflow库")
    recreate_db_airflow()
    logging.info("(3)重新启动服务")
    # start.sh re-initializes airflow automatically when it was wiped.
    run_danji = """
    cd /root/nrlj;
    docker compose up -d;
    cd /root/nrlj/nrlj_web;
    ./start.sh;
    """
    ssh_client.run_command(run_danji)
    logging.info("danji清理完毕，请把bcp包放入/data/bcp/用户名/bcp_upload/下，解析bcp包！")


# Wipe all lianji data and make danji re-send everything.
def clean_lianji_resend(ssh_client=lianji_ssh):
    """
    Stop the lianji / fusion services, wipe all lianji data stores
    (files, MySQL, Mongo, Neo4j, Kafka), restart the services, and
    trigger danji's runO to re-send its data.

    :param ssh_client: SSH session to the lianji host (defaults to module-level lianji_ssh).
    """
    logging.info("清理lianji所有数据，重新让danji发送")
    logging.info("(1)浏览器中，清理lianji首页数据")
    browserTool.clean_shouye(config.get('lian_ji.lianji.url'), config.get('lian_ji.lianji.user'),
                             config.get('lian_ji.lianji.pwd'))

    logging.info("(2)停掉lianji和融合服务")
    # NOTE(review): the "slow-query.lo" line below looks like a typo of
    # "slow-query.log" (already truncated one line above) — confirm before removing.
    stop_dev_program = """
cd /root/nrljlj/;
docker compose stop nrljlj-java fuse-start extract fuse-executor fuse-neo4j-executor;
rm -rf /root/nrljlj/nrljlj-java/nrljlj/*;
rm -rf /root/nrljlj/nrljlj-java/filepath/*;
rm /root/nrljlj/tensorrt_api/face/db.npy ;
rm /root/nrljlj/tensorrt_api/voiceprint/db.npy;
> /root/nrljlj/mysql/data/slow-query.log;
> /root/nrljlj/mysql/data/slow-query.lo;
> /root/nrljlj/nrljlj-java/console.log
rm -rf /root/nrljlj/fuse-*/logs/*;
rm -rf /root/nrljlj/extract/extract-executor/logs/*;
rm -rf /root/nrljlj/tensorrt_api/logs/*;
    """
    ssh_client.run_command(stop_dev_program)
    logging.info("(3)停止danjirunO的发送")
    danji_mysql_tools.mysql_script(sql_file="sql/stopSendOnline.sql")

    logging.info("(4)清理nrljdevdb")
    lianji_mysql_tools.mysql_dev_script(sql_file="sql/clean_devdb.sql")
    lianji_pick_mysql_ibd(ssh_client, lianji_mysql_tools.nrljdevdb, lianji_mysql_tools.mysql_lianji_dev)

    logging.info("(5)清理ljrhdb")
    lianji_mysql_tools.mysql_rh_script(sql_file="sql/clean_ljrhdb.sql")
    lianji_pick_mysql_ibd(ssh_client, lianji_mysql_tools.ljrhdb, lianji_mysql_tools.mysql_lianji_rh)

    # neirongdb also needs defragmenting.
    lianji_pick_mysql_ibd(ssh_client, lianji_mysql_tools.neirongdb, lianji_mysql_tools.mysql_neirongdb)
    # Purge the binlogs.
    lianji_mysql_tools.mysql_clean_binlog()

    logging.info("(6) 清理 mongo 的数据")
    clean_mongo.clean_mongodb(config.get('lian_ji.ip'), config.get('lian_ji.mongo.port'),
                              config.get('lian_ji.mongo.user'), config.get('lian_ji.mongo.pwd'), )

    logging.info("(7)浏览器中，清理neo4j数据")
    browserTool.clean_neo4j(config.get('lian_ji.neo4j.url'), config.get('lian_ji.neo4j.pwd'), )
    logging.info("(8)浏览器中，清理kafka数据")
    browserTool.clean_kfka(config.get('lian_ji.kafdrop.url'))

    logging.info("(9)启动lianji和融合服务")
    run_lianji = """
cd /root/nrljlj/;
docker compose up -d;
        """

    ssh_client.run_command(run_lianji)

    # Fixed duplicate step number: this step logged "(9)" twice.
    logging.info("(10)让runO重新发送数据")
    danji_mysql_tools.mysql_script(sql_file="sql/resendOnline.sql")


# Clean all data of lianji 2.0.
def clean_lianji2_resend(ssh_client=lianji_ssh):
    """
    Stop the lianji 2.0 / fusion services, wipe all lianji data stores
    (files, MySQL, Mongo, Neo4j, Kafka), restart the services, and
    trigger danji's runO (OceanBase variant) to re-send its data.

    :param ssh_client: SSH session to the lianji host (defaults to module-level lianji_ssh).
    """
    logging.info("清理lianji所有数据")
    logging.info("(0)停止danjirunO的发送")
    danji_mysql_tools.mysql_script(db_instance=ob_nrlj_db, sql_file="sql/ob_stopSendOnline.sql")

    logging.info("(1)浏览器中，清理lianji首页数据")
    browserTool.clean_shouye(config.get('lian_ji.lianji.url'), config.get('lian_ji.lianji.user'),
                             config.get('lian_ji.lianji.pwd'))

    logging.info("(2)停掉lianji和融合服务")
    # Raw string: `\;` (find -exec terminator) is an invalid escape sequence in a
    # normal string literal — a SyntaxWarning on Python >= 3.12 and a future error.
    # The runtime value is unchanged.
    stop_dev_program = r"""
    cd /root/nrljlj/;
    docker compose stop nrljlj-java fuse-start extract fuse-executor fuse-neo4j-executor;
    rm -rf /root/nrljlj/nrljlj-java/nrljlj/*;
    rm -rf /data/nrljlj/nrljlj-java/nrljlj/*;
    rm -rf /root/nrljlj/nrljlj-java/filepath/*;
    find /root/nrljlj/tensorrt_api -name db.npy -exec rm {} \;
    > /root/nrljlj/mysql/data/slow-query.log;
    > /root/nrljlj/mysql/data/slow-query.lo;
    > /root/nrljlj/nrljlj-java/console.log
    rm -rf /root/nrljlj/fuse-*/logs/*;
    rm -rf /root/nrljlj/extract/extract-executor/logs/*;
    rm -rf /root/nrljlj/tensorrt_api/logs/*;
    """
    ssh_client.run_command(stop_dev_program)
    logging.info("(3)清理nrljdevdb")
    lianji_mysql_tools.mysql_dev_script(sql_file="sql/clean_devdb.sql")
    lianji_pick_mysql_ibd(ssh_client, lianji_mysql_tools.nrljdevdb, lianji_mysql_tools.mysql_lianji_dev)

    logging.info("(4)清理ljrhdb")
    lianji_mysql_tools.mysql_rh_script(sql_file="sql/clean_ljrhdb.sql")
    lianji_pick_mysql_ibd(ssh_client, lianji_mysql_tools.ljrhdb, lianji_mysql_tools.mysql_lianji_rh)

    # Purge the binlogs.
    lianji_mysql_tools.mysql_clean_binlog()

    logging.info("(5) 清理 mongo 的数据")
    clean_mongo.clean_mongodb(config.get('lian_ji.ip'), config.get('lian_ji.mongo.port'),
                              config.get('lian_ji.mongo.user'), config.get('lian_ji.mongo.pwd'), )

    logging.info("(6)浏览器中，清理neo4j数据")
    browserTool.clean_neo4j(config.get('lian_ji.neo4j.url'), config.get('lian_ji.neo4j.pwd'), )
    logging.info("(7)浏览器中，清理kafka数据")
    browserTool.clean_kfka(config.get('lian_ji.kafdrop.url'))

    logging.info("(8)重启lianji和融合服务")
    run_lianji = """
    cd /root/nrljlj/;
    docker compose up -d;
        """
    ssh_client.run_command(run_lianji)

    logging.info("(9)让runO重新发送数据")
    danji_mysql_tools.mysql_script(db_instance=ob_nrlj_db, sql_file="sql/ob_resendOnline.sql")
    logging.info("默认单机每次发送给联机的任务，3分钟的周期，可以自行访问airflow提前触发")


def defragment_lianji_mysql():
    """Defragment the largest tables of every lianji MySQL database, then purge binlogs."""
    targets = (
        (lianji_mysql_tools.nrljdevdb, lianji_mysql_tools.mysql_lianji_dev),
        (lianji_mysql_tools.ljrhdb, lianji_mysql_tools.mysql_lianji_rh),
        (lianji_mysql_tools.neirongdb, lianji_mysql_tools.mysql_neirongdb),
    )
    for db, db_instance in targets:
        lianji_pick_mysql_ibd(lianji_ssh, db, db_instance)
    lianji_mysql_tools.mysql_clean_binlog()


def defragment_danji_mysql():
    """Purge danji MySQL binlogs, then defragment its largest .ibd files."""
    # Purge the binlogs first.
    danji_mysql_tools.mysql_clean_binlog()
    # Then locate and optimize the biggest tables.
    danji_pick_mysql_ibd(danji_ssh)
