import configparser
import pickle
import re
import shutil

import paramiko
import os
import hashlib
import time

from PyQt5.QtCore import pyqtBoundSignal
from ftpclient import FtpClient, FtpLoginInfo
from settings import BASE_DIR, ENCODING, IS_REAL_TIME_SYNC



# ---- 配置文件 ----
from settings import BASE_DIR, conf
from utils.publicFun import save_obj, load_obj, compress_image, unique_copy


# Hex MD5 digest of a string (used to key paths in the scan cache).
def md5(str):
    """Return the hexadecimal MD5 digest of *str*, UTF-8 encoded.

    NOTE(review): the parameter shadows the ``str`` builtin; the name is
    kept for backward compatibility with any keyword callers.
    """
    return hashlib.md5(str.encode('utf8')).hexdigest()


# Creation time of a file.
def get_FileCreateTime(filePath):
    """Return the creation time (st_ctime) of *filePath* as a POSIX
    timestamp (float). Propagates OSError when the path is missing."""
    return os.path.getctime(filePath)


# Last-modification time of a file.
def get_FileModifyTime(filePath):
    """Return the modification time (st_mtime) of *filePath* as a POSIX
    timestamp (float). Propagates OSError when the path is missing."""
    return os.path.getmtime(filePath)


# Whether the file has changed — unimplemented stub.
# Real change detection is done via the timestamp cache in
# RealTimeSync.list_dir(); this helper always reports "no change".
def has_change(filepath):
    return False


def is_hidden(filename):
    """Treat any dot-prefixed name as hidden (Unix convention)."""
    return filename.startswith('.')

class RealTimeSync():
    """Watch a local directory tree and mirror new/changed entries to a
    remote directory through an SFTP/FTP client.

    Change detection: every path is keyed by the MD5 of the *path string*
    (not the file content) in ``_cache_hash``; the cached creation and
    modification timestamps are compared against the filesystem on each
    scan, and differing entries are queued for upload.
    """

    # path-hash -> {"create_time", "modify_time", "path"}; class-level for
    # backward compatibility, rebound per run by refresh_cachehash().
    _cache_hash = {}

    # Kept for compatibility: cleared by clear_list() but never appended
    # to anywhere in the current code.
    _have_upload = []

    def __init__(self):
        # Active transfer client; assigned in real_time_sync().
        self.sftp_sftp = None
        self._create_file_list = []   # paths discovered since last scan
        self._modify_file_list = []   # paths whose mtime changed
        self._delete_file_list = []   # reserved for deletions (never filled here)
        self._upload_file_list = []   # items successfully transferred
        self._config = configparser.ConfigParser()
        self._config.read(os.path.join(BASE_DIR, conf), encoding=ENCODING)

    def clear_list(self):
        """Empty all bookkeeping lists in place (keeps list identity)."""
        del self._modify_file_list[:]
        del self._create_file_list[:]
        del self._delete_file_list[:]
        del self._have_upload[:]

    def list_dir(self, local_dir):
        """Recursively scan *local_dir*, updating the timestamp cache and
        filling _create_file_list / _modify_file_list.

        Returns the cache dict.
        Raises FileNotFoundError when *local_dir* does not exist, which
        aborts the sync run.
        """
        if os.path.exists(local_dir):
            for filename in os.listdir(local_dir):
                pathname = os.path.join(local_dir, filename)
                filehash = md5(pathname)  # hash of the path string, not content
                try:
                    create_time = get_FileCreateTime(pathname)
                except:
                    # Entry vanished between listdir() and stat(); skip it.
                    print('文件不存在或已删除')
                    continue
                modify_time = get_FileModifyTime(pathname)
                isfile = os.path.isfile(pathname)

                # Skip hidden entries entirely.
                if (False == is_hidden(filename)):
                    if (False == isfile):
                        # Directory entry.
                        if (filehash in self._cache_hash):  # already cached
                            if (self._cache_hash[filehash]["modify_time"] != modify_time):
                                self._cache_hash[filehash]["modify_time"] = modify_time
                                if self._cache_hash[filehash]["create_time"] != create_time:
                                    if self._cache_hash[filehash]["path"] != pathname:
                                        print('modifyPath', pathname)
                                        self._modify_file_list.append(pathname)  # dirs sync too
                        else:  # first sighting: cache + queue for creation
                            self._cache_hash[filehash] = {"create_time": create_time, "modify_time": modify_time,
                                                          "path": pathname}
                            self._create_file_list.append(pathname)

                        self.list_dir(pathname)

                    else:
                        # Regular file.
                        if (filehash in self._cache_hash):  # already cached
                            if (self._cache_hash[filehash]["modify_time"] != modify_time):
                                print('modifyPathname', pathname)
                                self._modify_file_list.append(pathname)  # queue re-upload
                                self._cache_hash[filehash]["modify_time"] = modify_time
                        else:  # first sighting: cache + queue for creation
                            self._cache_hash[filehash] = {"create_time": create_time, "modify_time": modify_time,
                                                          "path": pathname}
                            self._create_file_list.append(pathname)
        else:
            print(local_dir, '不存在,取消同步')
            # BUGFIX: this was a bare ``raise`` with no active exception,
            # which raises an opaque RuntimeError. Raise an explicit,
            # identifiable error instead so callers can tell what happened.
            raise FileNotFoundError(local_dir)
        return self._cache_hash

    def upload(self, file_list, local_dir, remote_dir, jobName='client'):
        """Upload every path in *file_list* under *local_dir* to *remote_dir*.

        Files become relative to local_dir before transfer; directories are
        created remotely with mkd_dir. Returns True on success, 0 when the
        sync was switched off mid-run or the connection dropped.
        """
        print('real_time_local_dir', local_dir)
        print('file_list', len(file_list))
        try:
            self.sftp_sftp.reconnect(jobName)
            is_date_dir_change = self._config.get("client", 'is_date_dir_change')
            for filename in file_list:
                isfile = os.path.isfile(filename)
                # Re-read the config per file so a live flag change stops us.
                config = configparser.ConfigParser()
                config.read(os.path.join(BASE_DIR, conf), encoding=ENCODING)
                is_real_time = config.get(jobName, 'is_real_time')

                if is_real_time == '0':
                    print('close sync', IS_REAL_TIME_SYNC)
                    return 0
                if isfile:
                    # Make the path relative to local_dir; replace only the
                    # first occurrence so paths like /1122/test/12.txt are
                    # not mangled by a global replace.
                    filename = filename.replace("\\", "/").replace(local_dir, "", 1).replace('/', "", 1)
                    try:
                        # BUGFIX: is_compress_img used to be referenced in the
                        # condition *before* it was assigned, so every image
                        # upload raised NameError (swallowed below) and images
                        # were silently never transferred. Read the flag first.
                        is_compress_img = config.get(jobName, 'is_compress_img')
                        if (os.path.join(local_dir, filename).lower().endswith(
                                ('.bmp', '.dib', '.png', '.jpg', '.jpeg', '.pbm', '.pgm', '.ppm', '.tif', '.tiff')) and is_compress_img == '1'):
                            print('压缩图片...')
                            file_name = local_dir + '/' + filename
                            compress_image(file_name, jobName)
                            # Upload the compressed copy from the temp dir,
                            # then drop the temp dir.
                            self.sftp_sftp.upload_file(local_dir.split('/')[0] + '/_tmp_compress' + jobName, filename, self.sftp_sftp, remote_dir)
                            shutil.rmtree(local_dir.split('/')[0] + '/_tmp_compress' + jobName)
                        else:
                            self.sftp_sftp.upload_file(local_dir, filename, self.sftp_sftp, remote_dir)
                            self._upload_file_list.append(filename)
                    except Exception as e:
                        # Best effort: log and continue with the next file.
                        print('upload_file_error', e)
                else:
                    filename = filename.replace("\\", "/")
                    remote = remote_dir + filename.replace(local_dir, "", 1)
                    if remote_dir == '/':
                        # Avoid a remote path like /\\newdir when creating the
                        # first directory level; replace only the first '\\'
                        # so nested creations are untouched.
                        remote = remote_dir + filename.replace(local_dir, "", 1).replace("\\", "", 1)
                    if (is_date_dir_change == "1"):
                        # Re-shape timestamped dir names (Y-m-d-H-M-S-ms) into
                        # nested Y/m/d/H directories on the remote side.
                        re_search = re.search(r'\d{4}-\d{2}-\d{2}-\d{2}-\d{2}-\d{2}-\d{3}', remote)
                        if (re_search):
                            print('match-datetime-dir', re_search.group(), remote)
                            remote = remote.replace(re_search.group(), '')
                            remote += "/".join(re_search.group().split("-")[0:4])
                            print('dir-change', remote)
                    self.sftp_sftp.mkd_dir(self.sftp_sftp, filename, remote)
                    self._upload_file_list.append(filename)
            print('传输完成')
            return True
        except Exception as e:
            print("连接问题:中断或超时", e)
            return 0

    def backupAndClear(self, local_dir):
        """If is_clear_dir is enabled, delete everything that was uploaded,
        then reset the timestamp cache for the next scan."""
        is_clear_dir = self._config.get("client", 'is_clear_dir')
        if (is_clear_dir == '1'):
            for file_name in self._upload_file_list:
                print('删除', file_name)
                if os.path.isdir(file_name):
                    # Directories were queued with their full local path.
                    try:
                        shutil.rmtree(file_name)
                    except:
                        pass
                else:
                    # Files were queued relative to local_dir.
                    filenames = local_dir + r'/{}'.format(file_name)
                    try:
                        os.remove(filenames)
                    except:
                        pass
            del self._upload_file_list[:]
        self.refresh_cachehash()

    def real_time_sync(self, jobName, remote_dir, signal: 'pyqtBoundSignal'):
        """Main watch loop: connect, scan, wait for copies to settle, upload,
        back up / clear, and repeat until is_real_time is switched off.

        *remote_dir* is re-read from the config each cycle, so the argument
        only serves as the initial value.
        """
        self.sftp_sftp = FtpClient.ftp()
        print("开始监控文件...", IS_REAL_TIME_SYNC)
        self.refresh_cachehash()  # start from an empty cache
        save_obj({}, jobName + "_cache_statistics")
        while (True):
            # Re-read the config every cycle so UI changes take effect live.
            config = configparser.ConfigParser()
            config.read(os.path.join(BASE_DIR, conf), encoding=ENCODING)
            is_real_time = config.get(jobName, 'is_real_time')
            local_dir = config.get(jobName, 'local_dir')
            remote_dir = config.get(jobName, 'remote_dir')

            watch_interval = config.get('client', 'watch_interval')
            if is_real_time == '0':
                print('break', IS_REAL_TIME_SYNC)
                break
            self.clear_list()
            # Scan the tree; fills _create_file_list / _modify_file_list.
            self.list_dir(local_dir)
            # Deletions are detected nowhere yet; this loop only logs.
            for filename in self._delete_file_list:
                print(filename + ' 删除')
            lengths = len(self._create_file_list)

            if (lengths > 0):
                print("create...", len(self._create_file_list))
                # Wait until the newest file stops growing and no new files
                # keep appearing (i.e. the external copy has finished).
                while True:
                    sizes = os.path.getsize(self._create_file_list[lengths - 1])
                    time.sleep(float(watch_interval))
                    config = configparser.ConfigParser()
                    config.read(os.path.join(BASE_DIR, conf), encoding=ENCODING)
                    is_real_time = config.get(jobName, 'is_real_time')

                    if is_real_time == '0':
                        print('close sync', IS_REAL_TIME_SYNC)
                        return 0
                    self.clear_list()
                    self.refresh_cachehash()
                    lens = len(self.list_dir(local_dir).keys())
                    if lens > lengths:
                        print('copy....')
                        lengths = lens
                        continue
                    if os.path.getsize(self._create_file_list[lengths - 1]) < sizes:
                        print('copy...')
                        continue
                    _cache_statistics = load_obj(jobName + "_cache_statistics")
                    print("_cache_statistics...", jobName)
                    _cache_statistics["total"] = lens
                    _cache_statistics["succeeded"] = 0
                    save_obj(_cache_statistics, jobName + "_cache_statistics")
                    break
                print('start...')
                try:
                    isbreak = self.upload(self._create_file_list, local_dir, remote_dir, jobName)
                    # Keep a deduplicated local backup copy before clearing.
                    unique_copy(local_dir, local_dir.split('/')[0] + r'/ftp__/{}'.format('__备份') + local_dir[
                              local_dir.index('/'):])
                    if isbreak != 0:
                        self.backupAndClear(local_dir)
                    else:
                        break
                except Exception as e:
                    print('upload_failed1', e)
            if (len(self._modify_file_list) > 0):
                print("modify...")
                print(self._modify_file_list)
                try:
                    isbreak = self.upload(self._modify_file_list, local_dir, remote_dir, jobName)
                    # NOTE(review): unlike the create branch, this backup path
                    # has no '/' before the timestamp suffix — verify the
                    # intended destination before relying on these backups.
                    shutil.copytree(local_dir, local_dir.split('/')[0] + r'/ftp__/{}'.format('__备份') + time.strftime(
                        '%Y%m%d%H%M%S',
                        time.localtime(time.time())))
                    if isbreak != 0:
                        self.backupAndClear(local_dir)
                    else:
                        break
                except Exception as e:
                    print('upload_failed', e)

            # Persist the cache so state survives restarts, then next round.
            save_obj(self._cache_hash, "_cache_hash")

            time.sleep(1)

    def refresh_cachehash(self):
        """Rebind the path-timestamp cache to a fresh empty dict."""
        self._cache_hash = {}
if __name__ == '__main__':
    # Manual smoke target; the watch loop itself is driven from the UI.
    local_dir = '/src/ui'