import hashlib
import logging
import os
import re
import threading
import time
from datetime import datetime

import pyinotify
import pytz
import requests
from django.core.exceptions import MultipleObjectsReturned
from django.db.models import Q, F
import asyncio

from django.utils import timezone
from django.db import connections, router
# from django.db.backends.mysql.schema import DatabaseSchemaEditor
from django.db.backends.sqlite3.schema import DatabaseSchemaEditor
import traceback
from django.apps import apps

from iharbor_sync_client import settings
from log.logmanager import loggethander
from sync_web import exceptions
from sync_web.models import SyncFileBaseInfo, BucketSyncBaseInfo, SyncErrorLog, InotifyFileBaseInfo, AsyncTask
from storage.iharbor_handler import IharborStorage, get_str_hexMD5, BucketFileManagement

# Module loggers: the name passed to getLogger must match a 'loggers' entry in
# the Django settings LOGGING config — it cannot be chosen arbitrarily.
logger = logging.getLogger('django.request')
logger2 = loggethander()  # project file logger with its default name/path
logger3 = loggethander(name='inotify_wroker', path='/var/log/iharbor_sync_client/client_inotify.log')  # dedicated inotify log file


class EventHandler(pyinotify.ProcessEvent):
    """Catch-all pyinotify event handler that prints each event's path and mtime."""

    def process_default(self, event):
        # process_default receives every event mask without a dedicated
        # process_* method; report the file and its last-modified time.
        file_date = datetime.fromtimestamp(os.path.getmtime(event.pathname))
        print(f"File: {event.pathname} | Date: {file_date}")


class SyncFileHandler:
    """Sync / monitoring entry points.

    ``worker`` is the long-lived reconciliation loop that starts per-bucket
    scan threads and repairs inconsistent bucket state; ``inotifyworker``
    attaches a recursive inotify watch to a bucket's local directory.
    """

    async def inotifyworker(self, bucket_obj):
        """Watch ``bucket_obj.local_dir`` recursively for file events.

        Blocks forever inside ``notifier.loop()`` (pyinotify's own loop), so
        despite being a coroutine it never yields back to the event loop.
        """
        watch_manager = pyinotify.WatchManager()
        notifier = pyinotify.Notifier(watch_manager)

        # BUG FIX: the original read self.local_dir, which is never assigned
        # (the __init__ that set it was commented out); watch the bucket's
        # configured directory instead.
        watch_manager.add_watch(
            bucket_obj.local_dir,
            pyinotify.IN_CREATE | pyinotify.IN_DELETE | pyinotify.IN_CLOSE_WRITE
            | pyinotify.IN_MOVED_FROM | pyinotify.IN_MOVED_TO,
            rec=True)

        notifier.loop()

    def _start_bucket_main_thread(self, bucket):
        """Start the per-bucket scan thread unless one is already running."""
        pattern = rf"sync_bucket_{bucket.id}_main_thread$"
        if not find_thread_by_name_regex(pattern):
            name = f'sync_bucket_{bucket.id}_main_thread'
            t = threading.Thread(target=ScanHandler(bucket_obj=bucket).main, name=name)
            t.start()

    def _reconcile_enabled_bucket(self, bucket):
        """Repair a bucket whose enable flag is set but whose status disagrees.

        If worker threads for this bucket are still alive, force the status
        back to "syncing" (1) and record an error log; otherwise the flag is
        stale, so clear it and let the bucket be scheduled again.
        """
        pattern = rf"^{bucket.id}_\w+_thread$"
        if find_thread_by_name_regex(pattern):
            bucket.sync_status = 1
            bucket.save(update_fields=['sync_status'])
            l = [t.name for t in threading.enumerate() if re.search(pattern, t.name)]
            # BUG FIX: the original duplicated this branch twice, one copy with
            # a typo (换在工作); the corrected wording is used for both paths.
            msg = f'备份表 id={bucket.id}, 存储桶 {bucket.bucket_name}, 未备份完状态设置错误，更新为同步中，检测相应的线程还在工作: {l}'
            SyncErrorLog.objects.create(
                bucket_id=bucket.id,
                err_msg=msg
            )
        else:
            # No live worker threads: drop the stale flag.
            bucket.sync_status_enable = False
            bucket.save(update_fields=['sync_status_enable'])

    async def worker(self):
        """Singleton reconciliation loop over all configured buckets.

        Uses an AsyncTask row named ``main_sync_worker`` as a guard so only
        one loop runs; exits (removing the guard) once no buckets remain.

        NOTE(review): all ORM calls here are synchronous and block the event
        loop while this coroutine runs — confirm that is acceptable.
        """
        if AsyncTask.objects.filter(name="main_sync_worker").first():
            return  # another worker already owns the guard row
        AsyncTask.objects.create(name="main_sync_worker")

        while True:
            buckets = BucketSyncBaseInfo.objects.all()
            if not buckets:
                guard = AsyncTask.objects.filter(name="main_sync_worker").first()
                if guard:
                    guard.delete()
                return

            for bucket in buckets:
                if bucket.sync_status == 1 and not bucket.sync_status_enable:
                    # Marked "syncing" but no job flagged: (re)start the scan.
                    self._start_bucket_main_thread(bucket)
                elif bucket.sync_status in (0, 1, 2) and bucket.sync_status_enable:
                    self._reconcile_enabled_bucket(bucket)

            await asyncio.sleep(3)


def get_local_file_mod_time(fullname):
    """Return the last-modification time of *fullname* as an aware UTC datetime.

    Args:
        fullname: path to an existing local file.

    Raises:
        OSError: if the file does not exist or cannot be stat'ed.
    """
    # stdlib timezone.utc is interchangeable with pytz.utc for plain UTC and
    # drops the deprecated pytz dependency; imported locally under an alias
    # because the module-level name ``timezone`` is django.utils.timezone.
    from datetime import timezone as dt_timezone

    modified_time = os.path.getmtime(fullname)
    return datetime.fromtimestamp(modified_time, tz=dt_timezone.utc)


def find_thread_by_name_regex(pattern):
    """Return True if any live thread's name matches the regex ``pattern``."""
    thread = next(
        (t for t in threading.enumerate() if re.search(pattern, t.name)),
        None,
    )
    if thread is None:
        return False
    print(f"匹配到相应的线程： {pattern} ==> {thread} ")
    return True


class ScanHandler:
    """Walks a bucket's local directory tree and drives its file-sync workers."""

    def __init__(self, bucket_obj):
        # bucket_obj: a BucketSyncBaseInfo row describing one backup task.
        self.local_dir = bucket_obj.local_dir
        self.bucket_obj = bucket_obj
        self.remote_path = bucket_obj.remote_path

    def list_files(self, startpath):
        """Yield ``(full_path, containing_dir, filename)`` for every file under *startpath*."""
        for root, _dirs, files in os.walk(startpath):
            for file in files:
                yield os.path.join(root, file), root, file

    def get_local_remote_path(self, fullname, file):
        """Map a local file path to its (object key, object directory) in the bucket.

        Normalizes ``local_dir`` and ``remote_path`` to end with '/' (the
        normalized values are persisted on self), then swaps the local prefix
        for the remote one.
        """
        if not self.local_dir.endswith('/'):
            self.local_dir = f'{self.local_dir}/'

        if not self.remote_path.endswith('/'):
            self.remote_path = f'{self.remote_path}/'

        # The path relative to the local root becomes the object-key suffix.
        rel_name = fullname.split(self.local_dir)[1]
        remote_full_name = os.path.join(self.remote_path, rel_name)

        # NOTE(review): splitting on the bare filename is fragile if the name
        # also occurs in a parent directory component — confirm inputs.
        remote_full_dir = remote_full_name.split(file)[0]

        return remote_full_name, remote_full_dir

    def _log_and_record(self, msg):
        """Log *msg* via the request logger and persist it as a SyncErrorLog row."""
        logger.error(msg=msg)
        SyncErrorLog.objects.create(
            bucket_id=self.bucket_obj.id,
            err_msg=msg
        )

    def look_dir(self):
        """Scan the local tree and upsert one DB row per regular file.

        New files get a fresh row; files whose size or mtime changed have
        ``sync_time`` cleared so the sync stage re-uploads them.
        """
        for fullname, file_dir, file in self.list_files(self.local_dir):
            if not os.path.isfile(fullname):
                continue

            file_size = os.path.getsize(fullname)  # size in bytes
            modified_time = get_local_file_mod_time(fullname)

            # Where this file lives in the remote bucket.
            full_path, full_dir = self.get_local_remote_path(fullname=fullname, file=file)
            file_path_md5 = get_str_hexMD5(s=fullname)
            table_name = self.bucket_obj.get_bucket_table_name()
            bfm = BucketFileManagement(path=file_dir, collection_name=table_name)

            try:
                obj = bfm.get_dir_or_obj_exists(name=fullname)
            except Exception as e:
                self._log_and_record(
                    f'备份信息表:{self.bucket_obj.id}, 存储桶:{self.bucket_obj.bucket_name}, 本地路径：{fullname} 查询时错误。 err: {str(e)}')
                continue

            if not obj:
                # First time we see this file: insert a row for it.
                model_cls = bfm.get_obj_model_class()
                bfinfo = model_cls(
                    bucket_id=self.bucket_obj.id,
                    file_path=fullname,      # absolute local path
                    file_name=file,          # bare file name
                    path_md5=file_path_md5,  # md5 of the local path
                    remote_path=full_path,
                    file_size=int(file_size),
                    mod_time=modified_time)

                try:
                    bfinfo.save()
                except Exception as e:
                    self._log_and_record(
                        f'备份信息表:{self.bucket_obj.id}, 存储桶:{self.bucket_obj.bucket_name}, 本地路径：{fullname} 写入数据库失败，需要重新检测。 err: {str(e)}')

                continue

            if obj.file_size != int(file_size):
                if obj.sync_time:
                    # Already uploaded once: clear sync_time so it re-syncs.
                    obj.sync_time = None
                    obj.file_size = int(file_size)
                    obj.mod_time = modified_time
                    try:
                        obj.save(update_fields=['sync_time', 'file_size', 'mod_time'])
                    except Exception as e:
                        self._log_and_record(
                            f'备份信息表:{self.bucket_obj.id}, 存储桶:{self.bucket_obj.bucket_name}, 数据库文件id {obj.id} ,本地路径：{fullname} 数据库设置同步时间失败。 err: {str(e)}')
                    continue

                obj.file_size = int(file_size)
                obj.mod_time = modified_time
                # BUG FIX: the original saved update_fields=['sync_time', 'mod_time'],
                # silently dropping the new file_size from the UPDATE.
                obj.save(update_fields=['file_size', 'mod_time'])

            if obj.mod_time < modified_time:
                if obj.sync_time:
                    # Modified after its last sync: clear sync_time to re-sync.
                    obj.sync_time = None
                    obj.mod_time = modified_time
                    try:
                        obj.save(update_fields=['sync_time', 'mod_time'])
                    except Exception as e:
                        self._log_and_record(
                            f'备份信息表:{self.bucket_obj.id}, 存储桶:{self.bucket_obj.bucket_name}, 数据库文件id {obj.id} ,本地路径：{fullname} 数据库设置同步时间和修改时间失败。 err: {str(e)}')

                    continue

                obj.mod_time = modified_time
                obj.save(update_fields=['mod_time'])

        logger2.info(msg=f'{timezone.now()} - 已检测完成。')

    def start_sync_file_server(self):
        """Upload every pending file, then mark the bucket finished.

        Spawns one IharborStorage worker thread per file, throttled so the
        process-wide thread count stays below ``bucket.thread_num``, then
        busy-waits (3 s poll) until all ``<bucket_id>_<obj_id>_thread``
        workers have exited before flagging completion on the bucket.
        """
        table_name = self.bucket_obj.get_bucket_table_name()
        bfm = BucketFileManagement(path='', collection_name=table_name)
        for obj in bfm.get_sync_obj():
            # Already synced after its last modification: nothing to do.
            if obj.sync_time and obj.sync_time > obj.mod_time:
                continue

            oos = IharborStorage(sync_bucket=self.bucket_obj, sync_file=obj)

            # Throttle: wait until a worker slot frees up, then launch.
            while True:
                num = threading.active_count()
                if num < self.bucket_obj.thread_num:
                    name = f'{self.bucket_obj.id}_{obj.id}_thread'
                    threading.Thread(target=oos.main, name=name).start()
                    break
                logger2.info(f'等待进程工作完成： {num}')
                for i in threading.enumerate():
                    logger2.info(f'正在工作的进程  {i}')
                time.sleep(3)

        # Drain: wait for the last worker threads, then flag the bucket done.
        pattern = rf"^{self.bucket_obj.id}_\w+_thread$"
        while True:
            if not find_thread_by_name_regex(pattern):
                print(f'=====》 等待线程结束： {threading.enumerate()}')
                self.bucket_obj.sync_status = 2
                self.bucket_obj.sync_status_enable = True
                self.bucket_obj.sync_time = timezone.now()
                self.bucket_obj.save(update_fields=['sync_status', 'sync_time', 'sync_status_enable'])
                print("=====》 工作线程已结束！！！")
                break
            logger2.info(f'=====》 等待线程结束： {threading.enumerate()}')
            print(f'=====》 等待线程结束： {threading.enumerate()}')
            time.sleep(3)

    def main(self):
        """Full pipeline for one bucket: scan the directory, then sync files.

        Returns True on success. On failure records a SyncErrorLog row,
        resets the bucket's status flags and returns None.
        """
        try:
            self.look_dir()
        except Exception as e:
            msg = f'备份信息表：id={self.bucket_obj.id}, 存储桶 {self.bucket_obj.bucket_name} 遍历本地目录出现问题 ：{str(e)}'
            SyncErrorLog.objects.create(
                bucket_id=self.bucket_obj.id,
                err_msg=msg
            )
            # Reset so the scheduler can retry the scan later.
            self.bucket_obj.sync_status = 0
            self.bucket_obj.save(update_fields=['sync_status'])
            return

        # Flag the sync stage as active before starting uploads.
        self.bucket_obj.sync_status_enable = True
        self.bucket_obj.save(update_fields=['sync_status_enable'])
        try:
            self.start_sync_file_server()
        except Exception as e:
            # BUG FIX: the original copy-pasted the scan-stage message here;
            # this failure is in the file-sync stage, not the directory walk.
            msg = f'备份信息表：id={self.bucket_obj.id}, 存储桶 {self.bucket_obj.bucket_name} 同步文件出现问题 ：{str(e)}'
            SyncErrorLog.objects.create(
                bucket_id=self.bucket_obj.id,
                err_msg=msg
            )
            self.bucket_obj.sync_status = 0
            self.bucket_obj.sync_status_enable = False
            self.bucket_obj.save(update_fields=['sync_status', 'sync_status_enable'])
            return

        # Success: clear the running flag and mark the bucket synced.
        self.bucket_obj.sync_status_enable = False
        self.bucket_obj.sync_status = 2
        self.bucket_obj.save(update_fields=['sync_status_enable', 'sync_status'])
        return True



