# encoding=utf-8

"""IntervalConvertMain.py

    2024.3.28 此处忽略文件减少的情况
"""
import logging
import psutil
import os
import cv2 as cv
import numpy as np
import time
import traceback
from PIL import Image
from AppConfig import *
from IntervalSql import create_conn, insert_name_db, query_name_db, delete_name_db
from ScanCache import ScanCache
from DrScanTools import DrScanTools

# Module-level singletons shared by every function below.
# sql_tool: DB connection used for the processed-filename bookkeeping table.
sql_tool = create_conn()
# cache_tool: in-memory cache of filenames seen on the last scan (starts empty).
cache_tool = ScanCache([])
# scan_tool: directory scanner rooted at the source folder (FROM_PATH from AppConfig).
scan_tool = DrScanTools(FROM_PATH)


def convert_one(from_file: str, to_file: str, quality: int = 80):
    """Re-encode a single image from ``from_file`` into ``to_file``.

    Tries Pillow first; if the target file is still missing after the Pillow
    attempt (observed with slow disks / inputs Pillow saves without effect),
    falls back to OpenCV's imdecode/imencode, which also handle non-ASCII
    paths via numpy file I/O.

    :param from_file: path of the source image.
    :param to_file: path of the encoded output image; its extension selects
        the OpenCV fallback encoder.
    :param quality: JPEG quality passed to both encoders (default 80).
    :raises RuntimeError: if the OpenCV fallback fails to encode.
    """
    ext = os.path.splitext(os.path.basename(to_file))[1]

    logging.info("Use Pillow to decode")
    # Context manager releases the source file handle promptly; the original
    # left it open until garbage collection.
    with Image.open(from_file) as ori_mat:
        ori_mat.save(to_file, quality=quality)

    # Slow-disk workaround: give the filesystem time to materialize the file.
    time.sleep(SLEEP_TIME)

    if not os.path.exists(to_file):
        logging.info("Use opencv to decode")
        # np.fromfile + imdecode instead of cv.imread: works with non-ASCII paths.
        ori_mat = cv.imdecode(np.fromfile(from_file, dtype=np.uint8), cv.IMREAD_COLOR)
        res, encode_mat = cv.imencode(ext, ori_mat, [int(cv.IMWRITE_JPEG_QUALITY), quality])
        if not res:
            # The original ignored the success flag and wrote garbage on failure.
            raise RuntimeError("cv.imencode failed for {}".format(to_file))
        encode_mat.tofile(to_file)


def convert_list(files: list[str]):
    """Convert every file in *files* and record each conversion in the DB.

    Filenames are expected to follow the layout ``time-name-side[...]``
    (e.g. ``20240328-zhang-left.jpg``); the output lands in
    ``TO_PATH/<name>/<time>/`` under a left/right template name.

    :param files: source filenames relative to ``FROM_PATH``.
    """
    for each_file in files:
        str_split = each_file.split("-")
        # Filename layout: time-name-side
        t, n, l = str_split[0], str_split[1], str_split[2]

        # Source file location
        from_path = os.path.join(FROM_PATH, each_file)

        # Destination folder: TO_PATH/<name>/<time>
        dst_folder = os.path.join(TO_PATH, n, t)
        os.makedirs(dst_folder, exist_ok=True)

        # Anything that is not explicitly "left" is treated as right.
        if l.lower() == "left":
            dst_path = os.path.join(dst_folder, DST_NAME_TEMPLATE.format(LEFT_IDENTY))
        else:
            dst_path = os.path.join(dst_folder, DST_NAME_TEMPLATE.format(RIGHT_IDENTY))

        # Convert (convert_one raises if both encoders fail to produce output)
        convert_one(from_path, dst_path)

        logging.info("Convert done, file size: {}MB -> {}MB".format(
            os.path.getsize(from_path) / (1024 * 1024), os.path.getsize(dst_path) / (1024 * 1024)
        ))

        # Persist the processed filename; row_id avoids shadowing builtin id().
        row_id = insert_name_db(sql_tool, each_file)
        logging.info("Insert {} to sql done, data id: {}".format(each_file, row_id))


def convert_with_ori(files: list[str]):
    """Convert files, distinguishing originals from derived images.

    Same as :func:`convert_list`, but a source whose basename (without
    extension) ends in ``'s'`` is treated as an "original" and saved under
    the ``DST_NAME_S_TEMPLATE`` name instead of ``DST_NAME_TEMPLATE``.

    :param files: source filenames relative to ``FROM_PATH``.
    """
    for each_file in files:
        str_split = each_file.split("-")
        # Filename layout: time-name-side
        t, n, l = str_split[0], str_split[1], str_split[2]

        # Source file location
        from_path = os.path.join(FROM_PATH, each_file)

        # Destination folder: TO_PATH/<name>/<time>
        dst_folder = os.path.join(TO_PATH, n, t)
        os.makedirs(dst_folder, exist_ok=True)

        # Trailing 's' on the stem marks an original image.
        is_ori = os.path.splitext(each_file)[0].endswith('s')
        side = l.lower()

        # Mapping preserved exactly from the original branch chain, including
        # the fall-through: an unknown side with is_ori == False still takes
        # the "S" template for RIGHT (original `else` behavior).
        if side == "left":
            template = DST_NAME_S_TEMPLATE if is_ori else DST_NAME_TEMPLATE
            dst_path = os.path.join(dst_folder, template.format(LEFT_IDENTY))
        elif side == "right" and not is_ori:
            dst_path = os.path.join(dst_folder, DST_NAME_TEMPLATE.format(RIGHT_IDENTY))
        else:
            dst_path = os.path.join(dst_folder, DST_NAME_S_TEMPLATE.format(RIGHT_IDENTY))

        # Convert
        convert_one(from_path, dst_path)

        # Slow-disk workaround. NOTE(review): convert_one already sleeps
        # SLEEP_TIME internally, so each file waits twice — confirm intended.
        time.sleep(SLEEP_TIME)

        logging.info("Convert done, file size: {}MB -> {}MB".format(
            os.path.getsize(from_path) / (1024 * 1024), os.path.getsize(dst_path) / (1024 * 1024)
        ))

        # Persist the processed filename; row_id avoids shadowing builtin id().
        row_id = insert_name_db(sql_tool, each_file)
        logging.info("Insert {} to sql done, data id: {}".format(each_file, row_id))


def delete_list(files: list[str]):
    """Drop the database record of every filename in *files*.

    :param files: filenames whose rows should be removed.
    """
    for lost_file in files:
        delete_name_db(sql_tool, lost_file)


def main_init():
    """Warm the in-memory file cache from the database at startup."""
    # Pull the list of already-processed filenames out of the DB.
    known_files = query_name_db(sql_tool)
    logging.info("Read sql cache: {}".format(known_files))
    # Seed the global cache so the first loop only converts genuinely new files.
    cache_tool.update_file_cache(known_files)


def main_loop():
    """One scan/compare/convert cycle.

    Scans the source folder, diffs against the cached file list, deletes DB
    rows for files that disappeared, converts files that appeared, then
    refreshes the cache.
    """
    # Scan strategy depends on whether original ('s'-suffixed) files are wanted.
    if NEED_ORI == 0:
        new_file_list = scan_tool.filter_list()
    else:
        new_file_list = scan_tool.get_list()

    # d1: files in cache but gone from disk; d2: files on disk but not cached.
    d1, d2 = cache_tool.compare_cache(new_file_list)

    # Files vanished since last scan: purge their DB rows.
    if d1:
        logging.info("Next file lost: {}, delete db".format(d1))
        t1 = time.perf_counter()
        delete_list(d1)
        t2 = time.perf_counter()
        logging.info("Delete Done, cost: {}".format(t2 - t1))

    # New files appeared: convert them.
    if d2:
        logging.info("Find new file generate: {}".format(d2))
        t1 = time.perf_counter()
        if NEED_ORI == 0:
            convert_list(d2)
        else:
            convert_with_ori(d2)
        t2 = time.perf_counter()
        logging.info("Convert Done, cost: {}".format(t2 - t1))

    cache_tool.update_file_cache(new_file_list)


def set_single_instance():
    """Enforce a single running instance via a pid file.

    Reads an existing pid file; if it names a live process, refuses to start.
    Otherwise (no file, dead pid, or corrupt content) claims the pid file with
    our own pid.

    :return: True if this process may run, False if another instance is alive.
    """
    pid_file = os.path.join(AppConfig.CACHE_PATH, AppConfig.PID_FILE)
    time.sleep(0.5)
    if os.path.exists(pid_file):
        with open(pid_file, "r") as f:
            other_pid = f.read()

        try:
            other_alive = psutil.pid_exists(int(other_pid))
        except ValueError:
            # Corrupt/empty pid file (e.g. interrupted write): the original
            # crashed on int() here — treat it as stale and reclaim below.
            other_alive = False

        if other_alive:
            print("Don't startup multi instance, other pid is: {}".format(other_pid))
            return False

    # Claim (or reclaim) the pid file for this process.
    with open(pid_file, "w") as f:
        f.write(str(os.getpid()))

    return True


if __name__ == '__main__':
    # Refuse to start if another instance already holds the pid file.
    start_up_signal = set_single_instance()
    if not start_up_signal:
        print("Don't start multi-instance in once, program will exit in 5s")
        time.sleep(5)
        exit(0)

    loop_count = 1
    main_init()
    # Run forever: any failure is logged and the loop retries after the
    # normal interval rather than killing the daemon.
    while True:
        try:
            main_loop()
            logging.info("Interval Other Done, loop: {}".format(loop_count))
            loop_count += 1
        except Exception:
            # logging.exception appends the active traceback itself; the
            # original passed traceback.format_exc() as the message, which
            # logged the traceback twice.
            logging.exception("文件写入失败，请尝试重启电脑")
        time.sleep(LOOP_INTERVAL)
