#!/usr/bin/env python3

"""
Author: Yanrui Hu
Date: 2023/4/23
Description: 在部署阶段进行log的记录
Keywords: Ubuntu, Container, Docker, 毕设, log记录
"""

import json
import os
import subprocess
import time

REPO_PATH = os.path.dirname(os.path.abspath(__file__))


def _rewrite_header(path, header_lines):
    """Replace the first len(header_lines) lines of *path* in place."""
    with open(path, 'r') as f:
        lines = f.readlines()
    lines[: len(header_lines)] = header_lines
    # Write handle is opened only after the read handle is closed.
    with open(path, 'w') as f:
        f.writelines(lines)


def update_log_meta_info(image):
    """Update the meta-information headers of the log files.

    Rewrites the leading lines (Last Modified Time / Last Image Version /
    Current Image Version) of last-oper.log, status.log and operations.log
    under REPO_PATH/.database.

    Args:
        image: the current image reference, or the literal string "null"
            when no image is currently deployed.
    """
    cur_time = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime())
    lmt = f"Last Modified Time: {cur_time}\n"

    if image == "null":
        civ = "Current Image Version: null\n"
    else:
        civ = f"Current Image Version: \"{image}\"\n"

    last_oper_path = f"{REPO_PATH}/.database/last-oper.log"

    # Read last-oper.log once: its 3rd line holds the previous
    # "Current Image Version", which becomes this run's "Last Image Version".
    with open(last_oper_path, 'r') as f:
        lines = f.readlines()
    liv = f"Last Image Version: {lines[2].split()[3]}\n"

    # last-oper.log and operations.log share the 3-line header;
    # status.log only carries lmt and civ.
    _rewrite_header(last_oper_path, [lmt, liv, civ])
    _rewrite_header(f"{REPO_PATH}/.database/status.log", [lmt, civ])
    _rewrite_header(f"{REPO_PATH}/.database/operations.log", [lmt, liv, civ])


def inspect(image: str, quiet=False) -> "list[str]":
    """Inspect *image* and return all its `diff` dirs, lowest layer excluded.

    Runs `podman inspect`, dumps the raw JSON to REPO_PATH/inspect.json
    (kept on disk for later processing), and returns the overlay diff
    directories ordered from lower to upper layer, with the very lowest
    layer dropped and the writable upper layer appended last.
    """
    if not quiet:
        print(f"正在获取镜像 {image} 的详细信息")

    # Use an argument list instead of os.system so the image reference is
    # never interpreted by a shell (quoting / injection safety).
    with open(f"{REPO_PATH}/inspect.json", 'w') as out:
        subprocess.run(["podman", "inspect", image], stdout=out, check=False)
    # TODO: further processing of inspect.json

    with open(f"{REPO_PATH}/inspect.json", 'r') as f:
        data = json.load(f)

    upper_diff = data[0]['GraphDriver']['Data']['UpperDir']
    if not quiet:
        print("\nIn inspect, upper_diff:", upper_diff)

    diff_dirs = data[0]['GraphDriver']['Data']['LowerDir'].split(':')
    del diff_dirs[-1]  # drop the diff dir of the lowest layer

    diff_dirs.reverse()
    diff_dirs.append(upper_diff)
    if not quiet:
        print("In inspect, diff_dirs:", diff_dirs, '\n')

    return diff_dirs


def clean_log_file(logfile):
    """Truncate *logfile* (under REPO_PATH/.database) after its header.

    The header is delimited by two lines starting with '='; every line
    after the second such line is discarded.
    """
    path = f"{REPO_PATH}/.database/{logfile}"
    with open(path, 'r') as f:
        lines = f.readlines()

    # Indices of all '='-prefixed separator lines; the original code
    # hard-coded "last-oper.log" in both messages even though this
    # function handles any log file.
    separators = [i for i, line in enumerate(lines) if line.startswith("=")]
    assert len(separators) >= 1, f"{logfile} 文件中没有第一个 '=' 开头的行"
    assert len(separators) >= 2, f"{logfile} 文件中没有第二个 '=' 开头的行"

    reserve_lines = lines[: separators[1] + 1]

    # Reopen for writing only after the read handle is closed.
    with open(path, 'w') as f:
        f.writelines(reserve_lines)


def clear_all_log_files(quite=False):
    """Wipe every log file after an interactive confirmation.

    NOTE(review): the parameter is spelled `quite` (likely a typo for
    `quiet`); it is kept unchanged for caller compatibility.
    """
    prompt = """正在执行敏感操作，您应当清楚以下内容:
    1. 清空所有的 log 文件将丢弃在部署过程中保留的日志记录, 无法进行 undo 操作
    2. 此操作一般只用于在初次使用程序时清空无关日志，它们可能是被上传到仓库中的演示材料
    """
    if not quite:
        print(prompt)
        # Keep prompting until the user gives an explicit yes/no answer.
        answer = ""
        while answer not in ("y", "Y", "n", "N"):
            answer = input("是否继续？[y/n]: ")
        if answer in ("n", "N"):
            return

    for logfile in ("status.log", "last-oper.log", "operations.log"):
        clean_log_file(logfile)

    update_log_meta_info("null")


def update_operations_log():
    """Append the latest operation records to operations.log.

    Reads last-oper.log, skips its header (everything up to and including
    the second line starting with '='), and appends the remaining
    operation records, if any, to operations.log.
    """
    with open(f"{REPO_PATH}/.database/last-oper.log", 'r') as f:
        lines = f.readlines()

    # Indices of the '='-prefixed header separator lines. The original
    # required the first separator not to be on line 0; that check is kept.
    seps = [i for i, line in enumerate(lines) if line.startswith("=")]
    assert seps and seps[0] != 0, "last-oper.log 文件中没有第一个 '=' 开头的行"
    assert len(seps) >= 2, "last-oper.log 文件中没有第二个 '=' 开头的行"

    last_opers = lines[seps[1] + 1 :]
    if not last_opers:
        return
    # Append the records to operations.log.
    with open(f"{REPO_PATH}/.database/operations.log", 'a') as f:
        f.writelines(last_opers)


def _last_oper_records():
    """Return the record lines of last-oper.log after its header.

    The header ends at the second line starting with '='; the first
    separator must not be on line 0 (same constraint as the original
    manual scan).
    """
    with open(f"{REPO_PATH}/.database/last-oper.log", 'r') as f:
        lines = f.readlines()
    seps = [i for i, line in enumerate(lines) if line.startswith("=")]
    assert seps and seps[0] != 0, "last-oper.log 文件中没有第一个 '=' 开头的行"
    assert len(seps) >= 2, "last-oper.log 文件中没有第二个 '=' 开头的行"
    return lines[seps[1] + 1 :]


def update_status_log(include_undo=True):
    """Update the status.log file.

    Two cases:
    1. include_undo=True: last-oper.log contains an undo operation, so only
       the remaining non-undo (APPLY) records are appended to status.log.
       Removing the undone records from status.log is handled separately
       in autodeploy.undo_last_apply().
    2. include_undo=False: last-oper.log contains no undo operation, so all
       of its operation records are appended to status.log.
    """
    if not include_undo:
        last_opers = _last_oper_records()
    else:
        records = _last_oper_records()
        # Keep only the records from the first "APPLY" line onwards,
        # preceded by a blank separator line (as in the original).
        apply_idx = next(
            (i for i, line in enumerate(records) if line.startswith("APPLY")),
            None,
        )
        last_opers = ["\n"] + records[apply_idx:] if apply_idx is not None else []

    if not last_opers:
        return
    # Append the remaining records to status.log.
    with open(f"{REPO_PATH}/.database/status.log", 'a') as f:
        f.writelines(last_opers)


# 测试上面的代码功能是否符合预期
# if __name__ == "__main__":
#     update_status_log(include_undo=True)


def append_log_file(logfile, content):
    """Append *content* to REPO_PATH/.database/<logfile>."""
    target = f"{REPO_PATH}/.database/{logfile}"
    with open(target, 'a') as log:
        log.write(content)


def read_file_reversely(filepath) -> list[str]:
    """Return the lines of *filepath* in reverse order."""
    with open(filepath, "r") as fh:
        return fh.readlines()[::-1]


def read_log(filepath, quiet=False):
    """Read a log file and return its APPLY / REVERT record lines.

    Each returned element is a line starting with "APPLY_DIFF_DIR" or
    "REVERT_APPLY"; unless *quiet*, every record is also printed with
    its running number.
    """
    records = []
    with open(filepath, "r") as fh:
        for line in fh:
            if line.startswith(("APPLY_DIFF_DIR", "REVERT_APPLY")):
                records.append(line)
                if not quiet:
                    print(f"Log {len(records)}: {line}")
    return records


def get_all_lineno_of_specific_string(filepath, string):
    """Return the 1-based numbers of all lines starting with *string*."""
    with open(filepath, "r") as fh:
        return [no for no, line in enumerate(fh, start=1) if line.startswith(string)]


def read_lines_range(filepath, start, end):
    """Return lines *start*..*end* (1-based, inclusive) of *filepath*."""
    with open(filepath, "r") as fh:
        content = fh.readlines()
    return content[start - 1 : end]


def del_lines_range(filepath, start, end):
    """Delete lines *start*..*end* (1-based, inclusive) from *filepath*.

    The file is fully read first; the write handle is opened only after
    the read handle is closed (the original nested a write `with` inside
    the read `with`, shadowing `f` while both handles were open).
    """
    with open(filepath, "r") as f:
        lines = f.readlines()
    del lines[start - 1 : end]
    with open(filepath, "w") as f:
        f.writelines(lines)


def get_apply_blocks(filepath, quiet=False):
    """Return every APPLY block in *filepath* as a list of line lists.

    A block spans from a line starting with "APPLY_DIFF_DIR" to the
    matching line starting with "END_APPLY", inclusive.
    """
    starts = get_all_lineno_of_specific_string(filepath, "APPLY_DIFF_DIR")
    ends = get_all_lineno_of_specific_string(filepath, "END_APPLY")
    # Plain string message: the original `assert cond, print(...)` would,
    # on failure, evaluate print() and pass None as the assertion message.
    assert len(starts) == len(ends), "APPLY_DIFF_DIR 和 END_APPLY 的数量不一致"
    if not quiet:
        print("starts:", starts, "ends:", ends)
    return [read_lines_range(filepath, s, e) for s, e in zip(starts, ends)]


def get_last_apply_block(filepath, quiet=False):
    """Return the content of the last APPLY_DIFF_DIR block in *filepath*.

    Raises IndexError when the file contains no APPLY block.
    """
    starts = get_all_lineno_of_specific_string(filepath, "APPLY_DIFF_DIR")
    ends = get_all_lineno_of_specific_string(filepath, "END_APPLY")
    # Plain string message: the original `assert cond, print(...)` would,
    # on failure, evaluate print() and pass None as the assertion message.
    assert len(starts) == len(ends), "APPLY_DIFF_DIR 和 END_APPLY 的数量不一致"
    if not quiet:
        print("starts:", starts, "ends:", ends)
    return read_lines_range(filepath, starts[-1], ends[-1])


def get_last_end_block(filepath, quiet=False) -> list[str]:
    """Return the last END block of *filepath*, in reverse line order.

    The file must end with a line starting with "END". Lines are
    collected backwards until the block opener (a line starting with
    "APPLY" or "REVERSE") is reached, inclusive.
    """
    with open(filepath, "r") as fh:
        backwards = fh.readlines()[::-1]

    assert backwards[0].startswith("END")
    # "END_APPLY\n" -> "APPLY", "END_REVERSE\n" -> "REVERSE".
    identifier = backwards[0][4:-1]
    assert identifier in ("REVERSE", "APPLY")
    if not quiet:
        print("identifier: ", identifier)

    block = []
    for entry in backwards:
        block.append(entry)
        if entry.startswith(identifier):
            break

    return block


def del_last_end_block(filepath, quiet=False):
    """Delete the last END block of *filepath* in place.

    The file must end with a line starting with "END". Lines are popped
    from the end until the previous boundary (a line starting with "END"
    or "==") is reached; that boundary line itself is kept.
    """
    with open(filepath, "r") as f:
        lines = f.readlines()

    assert lines[-1].startswith("END")
    # "END_APPLY\n" -> "APPLY", "END_REVERSE\n" -> "REVERSE".
    identifier = lines[-1][4:-1]
    assert identifier in ("REVERSE", "APPLY")
    if not quiet:
        print("identifier: ", identifier)

    while True:
        lines.pop()
        if lines[-1].startswith("END") or lines[-1].startswith("=="):
            break

    # Reopen for writing only after the read handle is closed (the
    # original nested the write `with` inside the read `with`,
    # shadowing `f` while both handles were open).
    with open(filepath, 'w') as f:
        f.writelines(lines)


def get_last_line_of_file(filepath) -> str:
    """Return the last line of *filepath* (IndexError if the file is empty)."""
    with open(filepath, "r") as fh:
        content = fh.readlines()
    return content[-1]


def del_last_line_of_file(filepath):
    """Remove the last line of *filepath* in place.

    Raises IndexError when the file is empty (same as the original).
    The write handle is opened only after the read handle is closed
    (the original nested a write `with` inside the read `with`,
    shadowing `f` while both handles were open).
    """
    with open(filepath, "r") as f:
        lines = f.readlines()
    del lines[-1]
    with open(filepath, "w") as f:
        f.writelines(lines)


def del_last_blank_line_of_file(filepath):
    """Remove the last line of *filepath* if it is blank.

    A line is blank when it is empty or whitespace-only. Unlike the
    original, an empty file is tolerated (no IndexError); the write
    handle is opened only after the read handle is closed.
    """
    with open(filepath, "r") as f:
        lines = f.readlines()

    if lines and lines[-1].strip() == "":
        del lines[-1]

    with open(filepath, "w") as f:
        f.writelines(lines)


def get_last_deploy_target_dir():
    """Return the deploy target directory recorded in status.log, or None.

    Parses the first line starting with "APPLY_DIFF_DIR" and extracts the
    path after " -> ", stripped of quotes and (except for "/") of any
    trailing slash.

    NOTE(review): this scans top-down and returns the FIRST match even
    though the docstring speaks of the last deploy — confirm whether
    status.log keeps the newest record first.
    """
    with open(f"{REPO_PATH}/.database/status.log", "r") as fh:
        for record in fh:
            if record.startswith("APPLY_DIFF_DIR"):
                target = record.split(" -> ")[-1].strip().strip("'\"")
                return '/' if target == '/' else target.rstrip("/")

    return None


# if __name__ == '__main__0':
#     # 读取log文件
#     log_file = sys.argv[1]
#     res = read_log(log_file)
#     print(res)


# if __name__ == "__main__1":
#     # 1. 创建一个logger
#     logger = logging.getLogger()
#     logger.setLevel(logging.DEBUG)  # Log等级总开关

#     # 2. 创建一个handler, 用于写入日志文件
#     date = time.strftime('%Y%m%d%H%M%S', time.localtime(time.time()))
#     log_path = os.path.dirname(os.getcwd()) + '/log/'
#     log_name = log_path + date + '.log'
#     logfile = log_name

#     if not os.path.exists(log_path):
#         os.makedirs(log_path)
#         logger.info("Create log path: %s", log_path)
#         logger.info("Create log file: %s", log_name)
#     else:
#         logger.info("Log path: %s", log_path)
#         logger.info("Log file: %s", log_name)

#     fh = logging.handlers.RotatingFileHandler(
#         logfile, mode='a', maxBytes=1024 * 1024 * 5, backupCount=20, encoding='utf-8'
#     )
#     fh.setLevel(logging.DEBUG)  # 输出到file的log等级的开关

#     # 3. 再创建一个handler, 用于输出到控制台
#     ch = logging.StreamHandler()
#     ch.setLevel(logging.DEBUG)  # 输出到console的log等级的开关

#     # 4. 定义handler的输出格式
#     formatter = logging.Formatter(
#         "%(asctime)s - %(filename)s[line:%(lineno)d] - %(levelname)s: %(message)s"
#     )
#     fh.setFormatter(formatter)
#     ch.setFormatter(formatter)

#     # 5. 给logger添加handler
#     logger.addHandler(fh)
#     logger.addHandler(ch)

#     # 6. 记录一条日志
#     logger.info("Start print log")
#     logger.debug("Do something")
#     logger.warning("Something maybe fail.")
#     logger.info("Finish")

#     # 7. 关闭打开的文件
#     fh.close()
#     ch.close()
