import os
import re
import threading
import time
from datetime import datetime

import pyinotify

import asyncio

from django.utils import timezone

from log.logmanager import logger, logger2
from sync_web.models import BucketSyncBaseInfo, SyncErrorLog, AsyncTask
from storage.iharbor_handler import IharborStorage, get_str_hexMD5, BucketFileManagement
from utils.local_handler import find_thread_by_name_regex, get_local_file_mod_time


class EventHandler(pyinotify.ProcessEvent):
    """Catch-all inotify handler: prints each event's path and mtime."""

    def process_default(self, event):
        # Any event type without a dedicated process_* method lands here.
        # os.stat(...).st_mtime is what os.path.getmtime returns internally.
        file_date = datetime.fromtimestamp(os.stat(event.pathname).st_mtime)
        print(f"File: {event.pathname} | Date: {file_date}")


class SyncFileHandler:
    """Synchronization / monitoring module.

    ``worker`` guarantees that each bucket has at most one main sync thread
    running and reconciles the bucket's status flags when one already exists;
    ``inotifyworker`` watches a bucket's local directory for filesystem events.
    """

    async def inotifyworker(self, bucket_obj):
        """Watch *bucket_obj*'s local directory recursively for file events.

        Blocks inside ``notifier.loop()`` and does not return in normal
        operation.

        Args:
            bucket_obj: sync record providing the ``local_dir`` to watch.
        """
        watch_manager = pyinotify.WatchManager()
        notifier = pyinotify.Notifier(watch_manager)

        # Events of interest: creations, deletions, completed writes, moves.
        mask = (pyinotify.IN_CREATE
                | pyinotify.IN_DELETE
                | pyinotify.IN_CLOSE_WRITE
                | pyinotify.IN_MOVED_FROM
                | pyinotify.IN_MOVED_TO)

        # BUG FIX: the original read ``self.local_dir``, but the __init__ that
        # assigned it is commented out, so this always raised AttributeError.
        # The directory to watch comes from the bucket record instead.
        watch_manager.add_watch(bucket_obj.local_dir, mask, rec=True)

        notifier.loop()

    async def worker(self, bucket):
        """Start the main sync thread for *bucket*, or reconcile its flags.

        Args:
            bucket: a BucketSyncBaseInfo row describing one sync job.
        """
        pattern = rf"^sync_bucket_{bucket.id}_main_thread$"
        if not find_thread_by_name_regex(pattern):
            # No main thread for this bucket yet: start one.
            name = f'sync_bucket_{bucket.id}_main_thread'
            t = threading.Thread(target=ScanHandler(bucket_obj=bucket).main, name=name)
            t.start()
            print(f'桶 ==== 》  {threading.enumerate()}')
            return

        if bucket.sync_status == 1 and bucket.sync_status_enable == False:
            # NOTE(review): sync_status is already 1 here, so this write is a
            # no-op; kept for behavioural parity — confirm the intended field.
            bucket.sync_status = 1
            bucket.save(update_fields=['sync_status'])
            return

        if bucket.sync_status == 2 and bucket.sync_status_enable == True:
            pattern = rf"^run_sync_server_{bucket.id}_thread$"
            if not find_thread_by_name_regex(pattern):
                # The sync-server thread is gone: drop the "enabled" flag.
                bucket.sync_status_enable = False
                bucket.save(update_fields=['sync_status_enable'])
            else:
                # Sync-server thread still running: flip back to "syncing".
                # BUG FIX: the original saved 'sync_status_enable' (which was
                # never modified in this branch) instead of 'sync_status', so
                # the status change was silently dropped.
                bucket.sync_status = 1
                bucket.sync_time = None
                bucket.save(update_fields=['sync_status', 'sync_time'])


class ScanHandler:
    """Walk a bucket's local directory, record files in the DB and sync them.

    One instance serves one bucket-sync record.  ``main`` is the entry point
    used as a thread target; it spawns (at most) one directory-walk thread
    and one sync-server thread for the bucket.
    """

    def __init__(self, bucket_obj):
        self.local_dir = bucket_obj.local_dir      # local root directory to scan
        self.bucket_obj = bucket_obj               # the bucket sync record
        self.remote_path = bucket_obj.remote_path  # destination root inside the bucket
        self.recursive_flag = 0  # current recursion depth while walking the tree
        self.break_names = []    # file/dir names skipped during the walk

    def process_one_path(self, path):
        """Recursively walk *path* and register every regular file in the DB.

        Args:
            path: directory to walk (absolute or relative local path).
        """
        self.recursive_flag += 1  # entering one level deeper
        head_str = '-' * self.recursive_flag + f"layer {self.recursive_flag}"

        for entry in os.listdir(path):
            if entry in self.break_names:
                continue

            if path.endswith('/'):
                current_path = path + entry
            else:
                current_path = f'{path}/{entry}'

            if os.path.isdir(current_path):
                logger2.info(f"[dir  {head_str}]:   {entry}    --------full path: {current_path}")
                self.process_one_path(current_path)
            elif os.path.isfile(current_path):
                self.update_or_write_sql_path(current_path, path, entry)
            else:
                # Neither a file nor a directory (broken symlink, socket, ...).
                logger2.info(f"[????] {head_str}  {current_path}")

        self.recursive_flag -= 1  # leaving this level

        if self.recursive_flag == 0:
            # Back at the top: the whole tree has been visited.
            logger2.info(f'======> {self.local_dir} 目录遍历完成')

    def get_local_remote_path(self, fullname, file):
        """Map a local file path to its location in object storage.

        Args:
            fullname: absolute local path of the file.
            file: the bare file name (last path component).

        Returns:
            (remote_full_name, remote_full_dir) — the file's full remote path
            and the remote directory containing it (trailing '/').
        """
        # Normalise both roots to end with exactly one '/'.
        if not self.local_dir.endswith('/'):
            self.local_dir = f'{self.local_dir}/'
        if not self.remote_path.endswith('/'):
            self.remote_path = f'{self.remote_path}/'

        # Path relative to the local root.  maxsplit=1 guards against the
        # root string occurring again deeper inside the path (the original
        # unbounded split truncated the relative part in that case).
        relative = fullname.split(self.local_dir, 1)[1]

        remote_full_name = os.path.join(self.remote_path, relative)

        # Everything before the LAST occurrence of the bare file name is the
        # remote directory (rsplit: the name may also appear in a parent dir).
        remote_full_dir = remote_full_name.rsplit(file, 1)[0]

        return remote_full_name, remote_full_dir

    def update_or_write_sql_path(self, fullname, file_dir, file):
        """Insert or refresh the DB row describing local file *fullname*.

        A row whose ``sync_time`` is cleared (None) is treated as "needs
        upload", so clearing it schedules a (re-)sync.

        Args:
            fullname: absolute local path of the file.
            file_dir: directory containing the file.
            file: bare file name.
        """
        file_size = os.path.getsize(fullname)
        modified_time = get_local_file_mod_time(fullname)

        # Where this file will live in the remote bucket.
        full_path, full_dir = self.get_local_remote_path(fullname=fullname, file=file)
        file_path_md5 = get_str_hexMD5(s=fullname)
        table_name = self.bucket_obj.get_bucket_table_name()
        bfm = BucketFileManagement(path=file_dir, collection_name=table_name)

        try:
            obj = bfm.get_dir_or_obj_exists(name=fullname)
        except Exception as e:
            msg = f'备份信息表:{self.bucket_obj.id}, 存储桶:{self.bucket_obj.bucket_name}, 本地路径：{fullname} 查询时错误。 err: {str(e)}'
            logger.error(msg=msg)
            SyncErrorLog.objects.create(
                bucket_id=self.bucket_obj.id,
                err_msg=msg
            )
            return

        if not obj:
            # First sighting of this file: create its row.
            model_cls = bfm.get_obj_model_class()
            bfinfo = model_cls(
                bucket_id=self.bucket_obj.id,
                file_path=fullname,       # absolute local path
                file_name=file,           # bare file name
                path_md5=file_path_md5,   # md5 of the local path (lookup key)
                remote_path=full_path,    # destination inside the bucket
                file_size=int(file_size),
                mod_time=modified_time,
            )
            try:
                bfinfo.save()
            except Exception as e:
                msg = f'备份信息表:{self.bucket_obj.id}, 存储桶:{self.bucket_obj.bucket_name}, 本地路径：{fullname} 写入数据库失败，需要重新检测。 err: {str(e)}'
                logger.error(msg=msg)
                SyncErrorLog.objects.create(
                    bucket_id=self.bucket_obj.id,
                    err_msg=msg
                )
            return

        if obj.file_size != int(file_size):
            if obj.sync_time:
                # Size changed after a successful upload: clear sync_time so
                # the file is picked up again by the sync server.
                obj.sync_time = None
                obj.file_size = int(file_size)
                obj.mod_time = modified_time
                try:
                    obj.save(update_fields=['sync_time', 'file_size', 'mod_time'])
                except Exception as e:
                    msg = f'备份信息表:{self.bucket_obj.id}, 存储桶:{self.bucket_obj.bucket_name}, 数据库文件id {obj.id} ,本地路径：{fullname} 数据库设置同步时间失败。 err: {str(e)}'
                    logger.error(msg=msg)
                    SyncErrorLog.objects.create(
                        bucket_id=self.bucket_obj.id,
                        err_msg=msg
                    )
                return

            obj.file_size = int(file_size)
            obj.mod_time = modified_time
            # BUG FIX: the original saved update_fields=['sync_time', 'mod_time'],
            # so the new file_size was never persisted ('sync_time' is not even
            # modified in this branch).
            obj.save(update_fields=['file_size', 'mod_time'])

        if obj.mod_time < modified_time:
            if obj.sync_time:
                # Modified after its last upload: schedule a re-sync.
                obj.sync_time = None
                obj.mod_time = modified_time
                try:
                    obj.save(update_fields=['sync_time', 'mod_time'])
                except Exception as e:
                    msg = f'备份信息表:{self.bucket_obj.id}, 存储桶:{self.bucket_obj.bucket_name}, 数据库文件id {obj.id} ,本地路径：{fullname} 数据库设置同步时间和修改时间失败。 err: {str(e)}'
                    logger.error(msg=msg)
                    SyncErrorLog.objects.create(
                        bucket_id=self.bucket_obj.id,
                        err_msg=msg
                    )
                return
            obj.mod_time = modified_time
            obj.save(update_fields=['mod_time'])

        return

    def start_sync_file_server(self):
        """Upload every pending file via worker threads, then wait for them.

        Marks the bucket as fully synced (sync_status=2) once no pending
        files remain and all worker threads for this bucket have exited.
        """
        table_name = self.bucket_obj.get_bucket_table_name()
        bfm = BucketFileManagement(path='', collection_name=table_name)
        objs = bfm.get_sync_obj()

        for obj in objs:
            # Already uploaded after its last modification: nothing to do.
            if obj.sync_time and obj.sync_time > obj.mod_time:
                continue

            # Wait until the thread budget allows one more upload worker.
            while True:
                num = threading.active_count()
                if num < self.bucket_obj.thread_num:
                    name = f'{self.bucket_obj.id}_{obj.id}_thread'
                    t = threading.Thread(
                        target=IharborStorage(sync_bucket=self.bucket_obj, sync_file=obj).main,
                        name=name)
                    t.start()
                    break

                logger2.info(f'等待进程工作完成： {num}')
                for running in threading.enumerate():
                    logger2.info(f'正在工作的进程  {running}')
                time.sleep(3)

        pattern = rf"^{self.bucket_obj.id}_\w+_thread$"
        while True:
            if find_thread_by_name_regex(pattern):
                # Upload threads for this bucket are still alive: keep waiting.
                logger2.info(f'=====》 等待线程结束： {threading.enumerate()}')
                time.sleep(3)
                continue

            logger2.info(f'=====》 等待线程结束： {threading.enumerate()}')

            # All workers finished; re-query in case files changed meanwhile.
            table_name = self.bucket_obj.get_bucket_table_name()
            bfm = BucketFileManagement(path='', collection_name=table_name)
            objs = bfm.get_sync_obj()

            if not objs:
                self.bucket_obj.sync_status = 2
                self.bucket_obj.sync_status_enable = False  # switch off
                self.bucket_obj.sync_time = timezone.now()
                self.bucket_obj.save(update_fields=['sync_status', 'sync_time', 'sync_status_enable'])
                logger2.info(
                    f"=====》 工作线程已结束！！！ {self.bucket_obj.id}-{self.bucket_obj.bucket_name} 同步结束。")
                return

            # New/changed files appeared while uploading: run another pass.
            # NOTE(review): recursion kept from the original; it can nest if
            # files keep changing — consider an iterative retry loop.
            self.start_sync_file_server()

    def thread_look_dir(self):
        """Thread target: walk the local directory tree, logging failures."""
        try:
            self.process_one_path(path=self.local_dir)
        except Exception as e:
            msg = f'备份信息表：id={self.bucket_obj.id}, 存储桶 {self.bucket_obj.bucket_name} 遍历本地目录出现问题 ：{str(e)}'
            SyncErrorLog.objects.create(
                bucket_id=self.bucket_obj.id,
                err_msg=msg
            )

            # Reset status so the walk can be retried on a later pass.
            self.bucket_obj.sync_status = 0
            self.bucket_obj.save(update_fields=['sync_status'])
            return

    def thread_run_rync_server(self):
        """Thread target: run the sync server, resetting flags on failure."""
        self.bucket_obj.sync_status_enable = True
        self.bucket_obj.save(update_fields=['sync_status_enable'])  # enable the sync job
        try:
            self.start_sync_file_server()
        except Exception as e:
            # BUG FIX: the original message said "遍历本地目录出现问题"
            # (copy-pasted from thread_look_dir); this failure is in the
            # file-sync service, not the directory walk.
            msg = f'备份信息表：id={self.bucket_obj.id}, 存储桶 {self.bucket_obj.bucket_name} 同步文件服务出现问题 ：{str(e)}'
            SyncErrorLog.objects.create(
                bucket_id=self.bucket_obj.id,
                err_msg=msg
            )

            self.bucket_obj.sync_status = 0
            self.bucket_obj.sync_status_enable = False
            self.bucket_obj.save(update_fields=['sync_status', 'sync_status_enable'])
            return

    def main(self):
        """Entry point for the bucket's main sync thread.

        Starts (at most) one directory-walk thread and one sync-server
        thread for this bucket, then returns True.
        """
        pattern = rf"^look_dir_{self.bucket_obj.id}_thread$"
        if not find_thread_by_name_regex(pattern):
            if self.bucket_obj.dir_flag:
                name = f'look_dir_{self.bucket_obj.id}_thread'
                logger2.info(f'=====》 遍历目录线程：{name}')
                t = threading.Thread(target=self.thread_look_dir, name=name)
                t.start()
                # Reset the flag so the walk is not restarted on each pass.
                self.bucket_obj.dir_flag = False
                self.bucket_obj.save(update_fields=['dir_flag'])

        # Give the directory walk a head start before uploads begin.
        time.sleep(5)

        pattern = rf"^run_sync_server_{self.bucket_obj.id}_thread$"
        if not find_thread_by_name_regex(pattern):
            name = f'run_sync_server_{self.bucket_obj.id}_thread'
            logger2.info(f'=====-==================================》 启动同步线程：{name}')
            t = threading.Thread(target=self.thread_run_rync_server, name=name)
            t.start()

        return True
