import hashlib
import os
import time
import traceback

import requests
from django.apps import apps
from django.core.exceptions import MultipleObjectsReturned
from django.db.models import Q
from django.utils import timezone

from log.logmanager import logger, logger2
from sync_web import exceptions
from sync_web.models import SyncErrorLog, SyncFileBaseInfo, InotifyFileBaseInfo, BucketSyncBaseInfo
from django.db.backends.sqlite3.schema import DatabaseSchemaEditor
from django.db import connections, router

from utils.local_handler import get_local_file_mod_time


class IharborStorage:
    """Storage backend: uploads a local file to an iHarbor bucket over its HTTP API.

    ``sync_bucket`` is the bucket configuration row (name, base URL, token);
    ``sync_file`` is the per-file sync-state row (file_path, remote_path,
    mod_time, sync_time).
    """

    def __init__(self, sync_bucket, sync_file):
        self.bucket_name = sync_bucket.bucket_name
        self.objpath = sync_file.remote_path
        self.token = sync_bucket.bucket_token
        self.base_url = sync_bucket.bash_url
        self.block_size = 10485760  # chunked-upload block size: 10M
        self.sync_bucket = sync_bucket  # bucket configuration table row
        self.sync_file = sync_file  # file sync-state table row

    def _object_url(self, api_path, trailing=True):
        """Build the object's API URL.

        Avoids the double slash the original get_remote_file() produced when
        objpath already starts with '/'.

        :param api_path: e.g. 'v1/obj', 'v1/metadata', 'v2/obj'
        :param trailing: append a trailing '/' (v1 endpoints expect one)
        """
        sep = '' if self.objpath.startswith('/') else '/'
        url = f'{self.base_url}/api/{api_path}/{self.bucket_name}{sep}{self.objpath}'
        return f'{url}/' if trailing else url

    def _fail_msg(self, detail):
        """Common error-message prefix for this bucket/file, plus *detail*."""
        return (f'配置表中 {self.sync_bucket.id} 的存储桶 {self.sync_bucket.bucket_name} '
                f'文件 id={self.sync_file.id}, 路径= {self.sync_file.file_path} {detail}')

    def _log_error(self, msg):
        """Log *msg* and persist it to the bucket's error-log table."""
        logger.error(msg=msg)
        SyncErrorLog.objects.create(
            bucket_id=self.sync_bucket.id,
            err_msg=msg
        )

    def _mark_synced(self):
        """Record a successful sync; on DB save failure, log and back off 5s."""
        self.sync_file.sync_time = timezone.now()
        try:
            self.sync_file.save(update_fields=['sync_time'])
        except Exception as e:
            self._log_error(self._fail_msg(f'未上传成功，err {str(e)}'))
            time.sleep(5)

    def update_file(self, path):
        """Upload a non-empty file (<= 100M) in a single PUT request."""
        url = self._object_url('v1/obj')
        # NOTE(review): reads the whole file into memory (<= 100M per caller).
        with open(path, 'rb') as pathfile:
            files = {'file': pathfile.read()}
        headers = {
            'Authorization': f'BucketToken {self.token}'
        }
        res = requests.put(url, files=files, headers=headers)
        if res.status_code != 200:
            self._log_error(self._fail_msg(f'未上传成功，err {res.text}'))
            return
        self._mark_synced()

    def update_file_size0(self, path):
        """Create an empty (zero-byte) object via the metadata API.

        Bug fix: on a non-200 response that was NOT "object already exists",
        the original fell through and still marked the file as synced; now it
        returns without touching sync_time.
        """
        url = self._object_url('v1/metadata')
        print(f'空文件 url={url}')
        headers = {
            'Authorization': f'BucketToken {self.token}'
        }
        res = requests.post(url, headers=headers)
        if res.status_code != 200:
            self._log_error(self._fail_msg(f'未上传成功，err {res.text}'))
            # e.g. {"code":404,"code_text":"创建失败，对象已存在"}:
            # the object is already there, so treat it as synced.
            if res.json().get('code_text') == "创建失败，对象已存在":
                self._mark_synced()
            return
        self._mark_synced()

    def update(self, chunk, count, reset, base_offset=0):
        """Upload one chunk at remote offset base_offset + count * block_size.

        :param chunk: bytes of the chunk
        :param count: zero-based chunk index (relative to base_offset)
        :param reset: truncate the remote object before the first chunk
        :param base_offset: starting byte offset (non-zero when resuming)
        :return: True on success, False on failure (error already logged)
        """
        url = self._object_url('v2/obj', trailing=False)
        m = hashlib.md5()
        m.update(chunk)

        headers = {
            'Content-MD5': m.hexdigest(),
            'Authorization': f'BucketToken {self.token}'
        }
        params = {
            # Bug fix: resumed uploads previously ignored the local seek and
            # always wrote from offset 0, corrupting the remote object.
            'offset': base_offset + count * self.block_size,
        }

        if reset and count == 0:
            params['reset'] = 'true'

        r = requests.post(url=url, headers=headers, data=chunk, params=params)
        if r.status_code != 200:
            self._log_error(self._fail_msg(f'第 {count} 块 未上传成功，err {r.text}'))
            return False
        return True

    def chunk_file_reader(self, fp):
        """Yield successive block_size chunks from *fp* until EOF."""
        while True:
            chunk = fp.read(self.block_size)
            if not chunk:
                break
            yield chunk

    def update_file_chunk(self, path, flag=False, size=0, reset=False):
        """Upload a large file in block_size chunks.

        Bug fix: the original ignored per-chunk failures and marked the file
        as synced regardless; now the first failed chunk aborts the upload
        and sync_time is left untouched.

        :param path: local file path
        :param flag: resume mode — seek the local file to *size* first
        :param size: byte offset to resume from (remote object size)
        :param reset: ask the server to truncate the object before chunk 0
        """
        count = 0
        base_offset = size if flag else 0
        with open(path, 'rb') as fp:
            if flag:
                fp.seek(size)

            for chunk in self.chunk_file_reader(fp):
                if not self.update(chunk=chunk, count=count, reset=reset, base_offset=base_offset):
                    return
                count += 1

        self._mark_synced()

    def get_remote_file(self):
        """Fetch remote object metadata.

        :return:
            int size ('si')  # object exists
            0                # 404, object absent
            None             # other error (already logged)
        """
        url = self._object_url('v1/metadata')
        headers = {
            'Authorization': f'BucketToken {self.token}'
        }
        res = requests.get(url, headers=headers)
        if res.status_code != 200:
            if res.status_code == 404:
                return 0

            self._log_error(self._fail_msg(f'未上传成功，err {res.text}'))
            return None

        return res.json().get('obj')['si']

    def main(self):
        """Entry point: choose the upload strategy from local file size/state."""
        if not os.path.isfile(self.sync_file.file_path):
            # Local file vanished: record the fact and drop the sync row.
            self._log_error(self._fail_msg('文件不存在，已被删除。'))
            self.sync_file.delete()
            return
        file_size = os.path.getsize(self.sync_file.file_path)

        if file_size == 0:
            # Empty file: metadata-only creation.
            logger2.info(f'开始同步文件 {self.sync_file.file_path}')
            self.update_file_size0(path=self.sync_file.file_path)
            logger2.info(f'结束同步文件 {self.sync_file.file_path}')
            return

        if file_size > 104857600:  # > 100M: chunked upload
            si = self.get_remote_file()  # remote size; 0 = absent, None = error
            if not si:
                # Nothing usable remotely: upload from scratch.
                logger2.info(f'开始同步文件 {self.sync_file.file_path}')
                self.update_file_chunk(path=self.sync_file.file_path)
                logger2.info(f'结束同步文件 {self.sync_file.file_path}')
            elif self.sync_file.sync_time is None:
                if get_local_file_mod_time(self.sync_file.file_path) > self.sync_file.mod_time:
                    # Local file changed since it was recorded: restart upload.
                    logger2.info(f'开始重传文件 {self.sync_file.file_path}')
                    self.update_file_chunk(path=self.sync_file.file_path, flag=True, reset=True)
                    logger2.info(f'结束重传文件 {self.sync_file.file_path}')
                elif file_size == si:
                    # Remote copy already complete: just record the sync time.
                    self.sync_file.sync_time = timezone.now()
                    self.sync_file.save(update_fields=['sync_time'])
                elif file_size < si:
                    # Remote is larger than local: restart upload.
                    logger2.info(f'开始重传文件 {self.sync_file.file_path}')
                    self.update_file_chunk(path=self.sync_file.file_path, flag=True, reset=True)
                    logger2.info(f'结束重传文件 {self.sync_file.file_path}')
                else:
                    # Remote shorter than local: resume from the remote size.
                    logger2.info(f'开始续传文件 {self.sync_file.file_path}')
                    self.update_file_chunk(path=self.sync_file.file_path, flag=True, size=int(si))
                    logger2.info(f'结束续传文件 {self.sync_file.file_path}')
            elif self.sync_file.sync_time < self.sync_file.mod_time:
                # Synced before but modified since: restart upload.
                logger2.info(f'开始重传文件 {self.sync_file.file_path}')
                self.update_file_chunk(path=self.sync_file.file_path, flag=True, reset=True)
                logger2.info(f'结束重传文件 {self.sync_file.file_path}')
            return

        # Non-empty file <= 100M: single-request upload.
        logger2.info(f'开始同步文件 {self.sync_file.file_path}')
        self.update_file(path=self.sync_file.file_path)
        logger2.info(f'结束同步文件 {self.sync_file.file_path}')


def _get_or_create_obj_model_class(base_model, table_name):
    """Return the dynamic Model class bound to *table_name*, creating it once.

    RuntimeWarning: Model 'xxxxx_' was already registered. Reloading models is
    not advised as it can lead to inconsistencies most notably with related
    models. As that warning says, Django discourages redefining a Model, so an
    already-registered class is looked up via get_registered_model() and
    reused; only when the lookup fails is a new class built with type().

    :param base_model: abstract base model supplying fields and Meta.app_label
    :param table_name: database table the model maps to
    :return: Model class
    """
    model_name = 'ObjModel' + table_name
    app_label = base_model.Meta.app_label
    try:
        return apps.get_registered_model(app_label=app_label, model_name=model_name)
    except LookupError:
        pass

    meta = base_model.Meta()
    meta.abstract = False
    meta.db_table = table_name  # database table name
    return type(model_name, (base_model,), {'Meta': meta, '__module__': base_model.__module__})


def get_obj_model_class(table_name):
    """Dynamic model class for a bucket's sync-file table.

    :param table_name: database table name
    :return: Model class
    """
    return _get_or_create_obj_model_class(SyncFileBaseInfo, table_name)


def get_obj_inotify_model_class(table_name):
    """Dynamic model class for a bucket's inotify table.

    :param table_name: database table name
    :return: Model class
    """
    return _get_or_create_obj_model_class(InotifyFileBaseInfo, table_name)


def create_table_for_model_class(model):
    """
    Create the database table backing *model*.

    :param model: Model class
    :return:
            True: success
            False: failure (full traceback is written to the error log)
    """
    try:
        alias = router.db_for_write(model)
        connection = connections[alias]
        with DatabaseSchemaEditor(connection=connection) as editor:
            editor.create_model(model)
        return True
    except Exception:
        logger.error(traceback.format_exc())
        return False


def delete_table_for_model_class(model):
    """
    Drop the database table backing *model*.

    Bug fix: guard ``e.args`` before indexing — an exception raised with empty
    args made the original raise IndexError from inside the except block.

    :param model: Model class
    :return:
            True: success (including when the table is already gone)
            False: failure
    """
    try:
        using = router.db_for_write(model)
        with DatabaseSchemaEditor(connection=connections[using]) as schema_editor:
            schema_editor.delete_model(model)
    except Exception as e:
        logger.error(str(e))
        # MySQL 1051 (unknown table) / 1146 (table doesn't exist):
        # the table is already absent, which is the desired end state.
        if e.args and e.args[0] in (1051, 1146):
            return True

        return False

    return True


def create_bucket(bucket_name, bash_url, local_dir, remote_path, bucket_token, sync_status=0, thread_num=3):
    """Create a bucket sync-config row plus its two per-bucket tables.

    Called when the user saves a sync configuration.

    Bug fix: create_table_for_model_class() swallows exceptions internally and
    signals failure via its boolean return value, so the original try/except
    around it could never fire and a failed table creation was silently
    ignored. The return value is checked now, with the same cleanup + raise.

    :raises: exceptions.Error
    """
    try:
        bucket = BucketSyncBaseInfo(
            bucket_name=bucket_name,
            bash_url=bash_url,
            local_dir=local_dir,
            remote_path=remote_path,
            bucket_token=bucket_token,
            sync_status=sync_status,
            thread_num=thread_num,
        )
        bucket.save(force_insert=True)
    except Exception as e:
        raise exceptions.Error(message=f"create bucket metadata failed, {str(e)}.")

    # Sync-file table for this bucket.
    col_name = bucket.get_bucket_table_name()
    bfm = BucketFileManagement(collection_name=col_name)
    model_class = bfm.get_obj_model_class()
    if not create_table_for_model_class(model=model_class):
        msg = f'备份信息表 存储桶:{bucket.bucket_name}, 创建数据表或添加备份信息有误, 请重新填写 err= 详见日志。'
        bucket.delete()
        delete_table_for_model_class(model=model_class)
        raise exceptions.Error(f"{msg}， create bucket table failed.")

    # Inotify table for this bucket; on failure, roll back everything created above.
    col_name_inotify = bucket.get_inotify_table_name()
    bfm_inotify = BucketFileManagement(collection_name=col_name_inotify, flag=True)
    model_class_inotify = bfm_inotify.get_obj_model_class()
    if not create_table_for_model_class(model=model_class_inotify):
        msg = f'备份信息表 存储桶:{bucket.bucket_name}, 创建监控信息数据表有误, 请重新填写 err= 详见日志。'
        bucket.delete()
        delete_table_for_model_class(model=model_class)
        delete_table_for_model_class(model=model_class_inotify)
        raise exceptions.Error(f"{msg}, create bucket table failed.")


def get_str_hexMD5(s: str):
    """
    Return the hex MD5 digest of the UTF-8 encoding of *s*.
    """
    digest = hashlib.md5()
    digest.update(s.encode(encoding='utf-8'))
    return digest.hexdigest()


class BucketFileManagement:
    """
    Operations on a single bucket's per-bucket database tables.
    """

    def __init__(self, path='', collection_name='', flag=False, *args, **kwargs):
        self._path = self._hand_path(path)
        self._collection_name = collection_name  # bucket's database table name
        self.cur_dir_id = None
        # Dynamically created model class bound to this bucket's table.
        self._bucket_file_class = self.creat_obj_model_class(flag=flag)

    def creat_obj_model_class(self, flag=False):
        """
        Dynamically create the model class mapped to this bucket's table.

        :param flag: True -> inotify-table model, False -> sync-file-table model
        """
        db_table = self.get_collection_name()  # database table name

        if flag:
            return get_obj_inotify_model_class(db_table)
        return get_obj_model_class(db_table)

    def get_obj_model_class(self):
        """Return the cached model class, creating it lazily if needed."""
        if not self._bucket_file_class:
            self._bucket_file_class = self.creat_obj_model_class()

        return self._bucket_file_class

    @staticmethod
    def _hand_path(path):
        """
        Normalize a path: strip surrounding spaces and any trailing '/'.

        Bug fix: the original called path.strip(' ') and discarded the result
        (str methods return a new string), so surrounding spaces survived.
        Non-str input yields ''.
        """
        if isinstance(path, str):
            return path.strip(' ').rstrip('/')

        return ''

    def get_collection_name(self):
        """Database table name of this bucket."""
        return self._collection_name

    def get_obj(self, path: str):
        """
        Get a directory or object row by path.

        :param path: directory or object path
        :return:
            obj     # success
            None    # not found

        :raises: Error
        """
        na_md5 = get_str_hexMD5(path)
        model_class = self.get_obj_model_class()
        try:
            obj = model_class.objects.get(Q(path_md5=na_md5) | Q(path_md5__isnull=True), file_path=path)
        except model_class.DoesNotExist:
            return None
        except MultipleObjectsReturned:
            msg = f'数据库表{self.get_collection_name()}中存在多个相同的目录：{path}'
            logger.error(msg)
            raise exceptions.Error(message=msg)
        except Exception as e:
            msg = f'select {self.get_collection_name()},path={path},err={str(e)}'
            logger.error(msg)
            raise exceptions.Error(msg)

        return obj

    def get_sync_obj(self):
        """
        Queryset of rows that still need syncing (sync_time is NULL).

        :return:
            queryset    # success
            None        # not found

        :raises: Error
        """
        model_class = self.get_obj_model_class()
        try:
            obj = model_class.objects.filter(sync_time__isnull=True)  # sync_time__lt=F('mod_time')
        except model_class.DoesNotExist:
            return None
        except Exception as e:
            msg = f'select {self.get_collection_name()}, 查询失败，err={str(e)}'
            raise exceptions.Error(msg)

        return obj

    def get_all_obj(self):
        """
        Queryset of all rows in this bucket's table.

        :return:
            queryset    # success
            None        # not found

        :raises: Error
        """
        model_class = self.get_obj_model_class()
        try:
            obj = model_class.objects.all()
        except model_class.DoesNotExist:
            return None
        except Exception as e:
            msg = f'select {self.get_collection_name()},err={str(e)}'
            logger.error(msg)
            raise exceptions.Error(msg)

        return obj

    def get_sync_complete_obj(self):
        """
        Queryset of rows already synced (sync_time is set).

        :return:
            queryset    # success
            None        # not found

        :raises: Error
        """
        model_class = self.get_obj_model_class()
        try:
            obj = model_class.objects.filter(sync_time__isnull=False).all()  # sync_time__lt=F('mod_time')
        except model_class.DoesNotExist:
            return None
        except Exception as e:
            msg = f'select {self.get_collection_name()}, 查询失败，err={str(e)}'
            raise exceptions.Error(msg)

        return obj

    def get_dir_or_obj_exists(self, name, check_path_exists: bool = True):
        """
        Get a child directory or object under the current path by name.

        :param name: directory or object name
        :param check_path_exists: whether to verify the current path exists
        :return: row object or None

        :raises: Error, NoParentPath
        """
        # path = self.build_dir_full_name(name)
        try:
            dir_or_obj = self.get_obj(path=name)
        except Exception as e:
            raise exceptions.Error(message=f'查询目录id错误，{str(e)}')

        return dir_or_obj

    def build_dir_full_name(self, dir_name):
        """
        Join *dir_name* onto this manager's base path.

        Bug fix: the original discarded the result of dir_name.strip('/'),
        so surrounding slashes were kept in the output.

        :param dir_name: directory name
        :return: absolute directory path
        """
        dir_name = dir_name.strip('/')
        path = self._hand_path(self._path)
        return (path + '/' + dir_name) if path else dir_name
