import os
import ftplib
import logging
import sqlite3
import time
from datetime import datetime
from threading import Lock
from database import db_manager
from ftp_manager import ftp_manager
from websocket_manager import websocket_manager
import hashlib
import tempfile

sync_logger = logging.getLogger('sync')


class FileSyncer:
    """Synchronizes one local directory with an FTP server.

    Instances are singletons keyed by sync_dir_id (see __new__); class-level
    state below is shared across all syncers.
    """
    _instances = {}
    _lock = Lock()
    _recent_syncs = {}  # "path:operation" -> last sync timestamp, used to break sync loops
    _sync_cooldown = 5.0  # cooldown window in seconds before the same sync may repeat
    _min_file_size_threshold = 1  # minimum file size (bytes) before an upload is attempted

    def __new__(cls, sync_dir_id, db_path):
        """使用单例模式，每个sync_dir_id一个实例"""
        with cls._lock:
            if sync_dir_id not in cls._instances:
                instance = super(FileSyncer, cls).__new__(cls)
                instance._initialized = False
                cls._instances[sync_dir_id] = instance
            return cls._instances[sync_dir_id]

    def __init__(self, sync_dir_id, db_path):
        """Initialize state on first construction; later calls are no-ops (singleton)."""
        if self._initialized:
            return

        # Identity / configuration.
        self.sync_dir_id = sync_dir_id
        self.db_path = db_path
        self.chunk_size = 8192  # 8 KB transfer chunk size
        # Lazy FTP connection state (see the `ftp` property).
        self._ftp = None
        self._last_ftp_use = 0
        self._ftp_timeout = 300  # idle seconds after which the connection is re-opened
        # Cached sync_dirs row; raises if the directory record no longer exists.
        self.sync_info = self.get_sync_info()
        self._initialized = True

    def refresh_sync_info(self):
        """Re-read the sync_dirs row into the sync_info cache.

        Returns:
            True when the refresh succeeded, False otherwise.
        """
        try:
            self.sync_info = self.get_sync_info()
            sync_logger.info(
                f"同步信息已刷新: dir_id={self.sync_dir_id}, strategy={self.sync_info.get('sync_strategy')}")
        except Exception as e:
            sync_logger.error(f"刷新同步信息失败: {str(e)}")
            return False
        return True

    @property
    def ftp(self):
        """Return a live FTP connection obtained via the shared ftp_manager.

        The connection is cached on the instance; if it has been idle longer
        than self._ftp_timeout it is closed and re-opened. On any failure the
        cached handle is dropped and the exception is re-raised.
        """
        try:
            current_time = time.time()

            # Drop the cached connection if it has been idle too long.
            if self._ftp and (current_time - self._last_ftp_use) > self._ftp_timeout:
                sync_logger.info("FTP连接超时，重新连接")
                try:
                    self._ftp.quit()
                except Exception:
                    # Best effort: the connection may already be dead.
                    # (Was a bare `except:`, which also swallowed SystemExit/KeyboardInterrupt.)
                    pass
                self._ftp = None

            if not self._ftp:
                try:
                    self._ftp = ftp_manager.get_connection(
                        self.sync_info['ftp_host'],
                        self.sync_info['ftp_user'],
                        self.sync_info['ftp_pass'],
                        self.sync_info['ftp_path']
                    )
                except Exception as e:
                    sync_logger.error(f"获取FTP连接失败: {str(e)}")
                    self._ftp = None
                    raise

            self._last_ftp_use = current_time
            return self._ftp
        except Exception as e:
            sync_logger.error(f"FTP连接处理失败: {str(e)}")
            self._ftp = None
            raise

    def get_sync_info(self):
        """Fetch this syncer's configuration row from the sync_dirs table.

        Returns:
            dict with keys id, local_path, ftp_host, ftp_user, ftp_pass,
            ftp_path, sync_strategy.

        Raises:
            Exception: when the sync_dirs row no longer exists; the instance
            is also evicted from the singleton cache in that case.
        """
        try:
            with db_manager.get_connection() as db:
                row = db.execute(
                    'SELECT id, local_path, ftp_host, ftp_user, ftp_pass, ftp_path, sync_strategy FROM sync_dirs WHERE id = ?',
                    (self.sync_dir_id,)
                ).fetchone()
                if row:
                    return dict(row)

                # Row not found: log the error (the directory was probably deleted).
                sync_logger.error(f"找不到同步目录信息: sync_dir_id={self.sync_dir_id}，可能已被删除")

                # Evict this syncer from the singleton cache.
                with self._lock:
                    if self.sync_dir_id in self._instances:
                        del self._instances[self.sync_dir_id]

                # NOTE: this raise is caught by the outer handler below and
                # re-raised after an extra error log.
                raise Exception(f"找不到同步目录信息: sync_dir_id={self.sync_dir_id}，同步器已被清理")
        except Exception as e:
            sync_logger.error(f"获取同步信息失败: {str(e)}")
            raise

    def _should_skip_sync(self, file_path, operation):
        """检查是否应该跳过同步（防止循环同步）"""
        current_time = time.time()
        sync_key = f"{file_path}:{operation}"

        with self._lock:
            # 清理过期的同步记录
            expired_keys = [k for k, v in self._recent_syncs.items()
                            if current_time - v > self._sync_cooldown]
            for k in expired_keys:
                del self._recent_syncs[k]

            # 检查是否在冷却期内
            if sync_key in self._recent_syncs:
                last_sync_time = self._recent_syncs[sync_key]
                if current_time - last_sync_time < self._sync_cooldown:
                    sync_logger.debug(f"跳过同步 {file_path}: 在冷却期内")
                    return True

            # 记录本次同步时间
            self._recent_syncs[sync_key] = current_time
            return False

    def check_remote_file_exists(self, remote_path):
        """Return True when remote_path exists on the FTP server.

        A 550 reply ("no such file") yields False; any other failure is
        logged and also reported as False.
        """
        try:
            normalized = remote_path.replace('\\', '/')
            try:
                # SIZE only succeeds for an existing file.
                return self.ftp.size(normalized) is not None
            except ftplib.error_perm as e:
                if "550" not in str(e):
                    raise
                return False  # 550: file does not exist
        except Exception as e:
            sync_logger.error(f"检查远程文件失败: {str(e)}")
            return False

    def sync_file(self, rel_path, operation='sync', force_create=False):
        """Synchronize a single file according to the configured strategy.

        Args:
            rel_path: path relative to the sync directory root.
            operation: caller-suggested operation label; overridden below
                based on which side the file exists on.
            force_create: under the time_size strategy, record an upload of a
                newer local file as 'create' instead of 'modify'.

        Returns:
            True when the file was synced (or already in sync),
            False when no sync strategy is configured.

        Raises:
            Exception: on any transfer or lookup failure.
        """
        try:
            local_path = os.path.join(self.sync_info['local_path'], rel_path)
            remote_path = os.path.join(self.sync_info['ftp_path'], rel_path).replace('\\', '/')

            sync_strategy = self.sync_info.get('sync_strategy', 'none')

            if sync_strategy == 'none':
                sync_logger.warning(f"未设置同步策略，跳过同步: {rel_path}")
                return False

            # Existence on each side decides direction and operation type.
            local_exists = os.path.exists(local_path)
            remote_exists = self.check_remote_file_exists(remote_path)

            if sync_strategy == 'local_priority':
                if local_exists:
                    operation = 'create' if not remote_exists else 'modify'
                    sync_logger.info(f"本地优先策略: 文件{'不存在' if not remote_exists else '存在'}于服务器，使用{operation}操作")
                    self.upload_file(rel_path, operation)
                    self.record_sync_history(rel_path, 'local_to_server', operation)
                elif remote_exists:
                    self.download_file(rel_path)
                    # local_exists is False in this branch, so this is always a create.
                    self.record_sync_history(rel_path, 'server_to_local', 'create')
            elif sync_strategy == 'server_priority':
                if remote_exists:
                    self.download_file(rel_path)
                    operation = 'create' if not local_exists else 'modify'
                    self.record_sync_history(rel_path, 'server_to_local', operation)
                elif local_exists:
                    # A file missing from the server is always a create here.
                    # (Was `'create' if force_create else 'create'` -- a dead conditional.)
                    self.upload_file(rel_path, 'create')
                    self.record_sync_history(rel_path, 'local_to_server', 'create')
            else:  # time_size strategy
                if local_exists and remote_exists:
                    local_time = os.path.getmtime(local_path)
                    # NOTE(review): get_remote_mtime is not defined in this
                    # module view; _sync_by_time_size calls _get_remote_mtime
                    # instead -- confirm which helper actually exists.
                    remote_time = self.get_remote_mtime(remote_path)

                    if abs(local_time - remote_time) < 1:  # <1s difference counts as equal
                        return True

                    if local_time > remote_time:
                        operation = 'create' if force_create else 'modify'
                        self.upload_file(rel_path, operation)
                        self.record_sync_history(rel_path, 'local_to_server', operation)
                    else:
                        self.download_file(rel_path)
                        self.record_sync_history(rel_path, 'server_to_local', 'modify')
                elif local_exists:
                    # New files are always recorded as create.
                    self.upload_file(rel_path, 'create')
                    self.record_sync_history(rel_path, 'local_to_server', 'create')
                elif remote_exists:
                    self.download_file(rel_path)
                    self.record_sync_history(rel_path, 'server_to_local', 'create')

            return True

        except Exception as e:
            sync_logger.error(f"同步文件失败: {str(e)}")
            raise

    def sync_to_server(self, file_path, operation='modify'):
        """Upload a local file to the FTP server, retrying up to 3 times.

        For each attempt the path is tried as-is, then with UTF-8 and GBK
        bytes round-tripped through latin1 (for servers that mangle non-ASCII
        names). After a successful STOR the remote size is verified against
        the local size.

        Args:
            file_path: path relative to the sync directory root.
            operation: operation label recorded in the sync history.

        Returns:
            True on verified upload; False when the local file is missing or
            still below the minimum-size threshold.

        Raises:
            Exception: when every attempt fails or the size check fails.
        """
        try:
            file_path = file_path.replace('\\', '/')
            local_full_path = os.path.join(self.sync_info['local_path'], file_path)

            if not os.path.exists(local_full_path):
                sync_logger.error(f"本地文件不存在: {file_path}")
                return False

            # Tiny files may still be mid-write; wait once and re-check.
            local_size = os.path.getsize(local_full_path)
            if local_size < self._min_file_size_threshold:
                sync_logger.warning(f"文件大小小于阈值({self._min_file_size_threshold}字节)，等待写入完成: {file_path}")
                time.sleep(1.0)
                local_size = os.path.getsize(local_full_path)
                if local_size < self._min_file_size_threshold:
                    sync_logger.warning(f"文件仍然小于阈值，跳过上传: {file_path}")
                    return False

            # Make sure the remote parent directory chain exists.
            remote_dir = os.path.dirname(file_path)
            if remote_dir:
                self.ensure_remote_dir(remote_dir)

            upload_attempts = 0
            max_attempts = 3
            last_error = None

            while upload_attempts < max_attempts:
                try:
                    with open(local_full_path, 'rb') as f:
                        success = False

                        # 1. Try the path as-is.
                        try:
                            self.ftp.storbinary(f'STOR {file_path}', f, blocksize=8192)
                            success = True
                            sync_logger.info(f"直接上传文件成功: {file_path}")
                        except Exception as direct_e:
                            # BUGFIX: always rewind before retrying with another
                            # encoding -- the failed attempt may have consumed
                            # part of the file (the old conditional skipped the
                            # rewind on the final attempt).
                            f.seek(0)

                            # 2. UTF-8 bytes smuggled through latin1.
                            try:
                                encoded_path = file_path.encode('utf-8').decode('latin1')
                                self.ftp.storbinary(f'STOR {encoded_path}', f, blocksize=8192)
                                success = True
                                sync_logger.info(f"使用UTF-8编码上传文件成功: {file_path}")
                            except Exception as utf8_e:
                                f.seek(0)

                                # 3. GBK bytes smuggled through latin1.
                                try:
                                    encoded_path = file_path.encode('gbk').decode('latin1')
                                    self.ftp.storbinary(f'STOR {encoded_path}', f, blocksize=8192)
                                    success = True
                                    sync_logger.info(f"使用GBK编码上传文件成功: {file_path}")
                                except Exception as gbk_e:
                                    sync_logger.error(f"所有编码方式上传都失败: {file_path}")
                                    sync_logger.error(f"直接上传错误: {str(direct_e)}")
                                    sync_logger.error(f"UTF-8编码错误: {str(utf8_e)}")
                                    sync_logger.error(f"GBK编码错误: {str(gbk_e)}")
                                    # BUGFIX: always propagate so the retry
                                    # counter below advances. The original only
                                    # raised on the final attempt, so earlier
                                    # failures were swallowed, upload_attempts
                                    # never incremented, and the while loop
                                    # spun forever.
                                    raise Exception("所有上传方式都失败")

                        # Verify the upload by comparing sizes.
                        if success:
                            try:
                                remote_size = self.ftp.size(file_path)
                                if remote_size == local_size:
                                    sync_logger.info(f"文件上传成功: {file_path} (大小: {local_size} 字节)")
                                    self.record_sync_history(os.path.basename(file_path), 'local_to_server', operation)
                                    return True
                                else:
                                    raise Exception(f"文件大小不匹配: 本地={local_size}, 远程={remote_size}")
                            except ftplib.error_perm as e:
                                raise Exception(f"无法验证上传: {str(e)}")

                except Exception as e:
                    last_error = e
                    upload_attempts += 1
                    if upload_attempts < max_attempts:
                        sync_logger.warning(f"上传失败，尝试重试 ({upload_attempts}/{max_attempts}): {str(e)}")
                        self._ftp = None  # force a fresh FTP connection
                        time.sleep(2)  # back off before retrying
                    else:
                        sync_logger.error(f"上传失败，已达到最大重试次数: {str(e)}")
                        raise last_error

            return False

        except Exception as e:
            sync_logger.error(f"上传文件失败: {file_path}, 错误: {str(e)}")
            raise

    def sync_from_server(self, file_path):
        """Download a file from the FTP server into the local sync directory.

        The file is downloaded to a hidden temp file next to the target and
        then swapped into place, so readers never observe a partial file. A
        550 reply means the remote file is gone; under the server_priority
        strategy the local copy is deleted in that case.

        Args:
            file_path: path relative to the sync directory root.

        Returns:
            True when the local copy is up to date (downloaded, or already
            matching in size); False when the remote file does not exist.

        Raises:
            Exception: on any non-550 transfer failure.
        """
        try:
            file_path = file_path.replace('\\', '/')
            local_full_path = os.path.join(self.sync_info['local_path'], file_path)

            # Make sure the local parent directory exists.
            local_dir = os.path.dirname(local_full_path)
            if local_dir and not os.path.exists(local_dir):
                os.makedirs(local_dir)

            try:
                # Snapshot the local size (None when absent); used both for
                # the skip-if-equal check and the history operation type.
                local_size = None
                if os.path.exists(local_full_path):
                    local_size = os.path.getsize(local_full_path)
                    sync_logger.info(f"本地文件存在: {file_path}, 大小: {local_size} 字节")
                else:
                    sync_logger.info(f"本地文件不存在: {file_path}")

                # Fetch the remote size; 550 means the file is gone.
                try:
                    remote_size = self.ftp.size(file_path)
                    sync_logger.info(f"服务器文件大小: {file_path}, 大小: {remote_size} 字节")
                except ftplib.error_perm as e:
                    if "550" in str(e):  # remote file missing
                        sync_logger.info(f"服务器上文件不存在: {file_path}")
                        if self.sync_info['sync_strategy'] == 'server_priority' and os.path.exists(local_full_path):
                            sync_logger.info(f"服务器优先策略: 删除本地文件 {file_path}")
                            os.remove(local_full_path)
                        return False
                    raise

                # Same size on both ends: assume in sync and skip the download.
                if local_size is not None and local_size == remote_size:
                    sync_logger.info(f"本地文件与服务器文件大小相同，跳过下载: {file_path}")
                    return True
                else:
                    if local_size is not None:
                        sync_logger.info(
                            f"文件大小不同，需要下载: {file_path} (本地: {local_size}, 服务器: {remote_size})")
                    else:
                        sync_logger.info(f"本地文件不存在，需要下载: {file_path}")

                # Download into a temp file to avoid clobbering the target.
                temp_dir = os.path.dirname(local_full_path)
                temp_filename = f".tmp_{os.path.basename(local_full_path)}_{int(time.time())}"
                temp_path = os.path.join(temp_dir, temp_filename)

                try:
                    sync_logger.info(f"开始下载文件到临时位置: {temp_path}")
                    with open(temp_path, 'wb') as f:
                        self.ftp.retrbinary(f'RETR {file_path}', f.write)

                    # Swap the completed download into place.
                    try:
                        if os.name == 'nt' and os.path.exists(local_full_path):
                            # os.replace overwrites the target atomically on Windows.
                            os.replace(temp_path, local_full_path)
                            sync_logger.info(f"使用replace替换文件: {local_full_path}")
                        else:
                            # os.rename is atomic on POSIX filesystems.
                            os.rename(temp_path, local_full_path)
                            sync_logger.info(f"使用rename替换文件: {local_full_path}")
                    except Exception as e:
                        sync_logger.error(f"替换文件失败: {str(e)}")
                        # Fall back to delete-then-rename.
                        if os.path.exists(local_full_path):
                            os.remove(local_full_path)
                            sync_logger.info(f"删除目标文件后重试: {local_full_path}")
                        os.rename(temp_path, local_full_path)
                except Exception:
                    # BUGFIX: don't leave a stale .tmp_* file inside the sync
                    # directory when the download or swap fails (it could even
                    # get picked up by a later sync).
                    if os.path.exists(temp_path):
                        try:
                            os.remove(temp_path)
                        except OSError:
                            pass
                    raise

                # Sanity-check the final size against the server's.
                if os.path.exists(local_full_path):
                    final_size = os.path.getsize(local_full_path)
                    if final_size == remote_size:
                        sync_logger.info(f"文件下载成功，大小匹配: {file_path} ({final_size} 字节)")
                    else:
                        sync_logger.warning(
                            f"文件下载后大小不匹配: {file_path} (本地: {final_size}, 服务器: {remote_size})")

                sync_logger.info(f"文件下载成功: {file_path}")

                # Record the download in the sync history.
                operation = 'create' if local_size is None else 'modify'
                self.record_sync_history(os.path.basename(file_path), 'server_to_local', operation)
                return True

            except ftplib.error_perm as e:
                if "550" in str(e):  # remote file missing
                    sync_logger.info(f"服务器上文件不存在: {file_path}")
                    if self.sync_info['sync_strategy'] == 'server_priority' and os.path.exists(local_full_path):
                        sync_logger.info(f"服务器优先策略: 删除本地文件 {file_path}")
                        os.remove(local_full_path)
                    return False
                raise

        except Exception as e:
            sync_logger.error(f"下载文件失败: {file_path}, 错误: {str(e)}")
            raise

    def _sync_by_time_size(self, file_path):
        """Pick a sync direction by comparing mtime, then size, on both sides.

        A newer (or same-age but larger) remote file wins a download;
        otherwise the local copy is uploaded. A 550 reply (no remote file)
        also triggers an upload.
        """
        try:
            file_path = file_path.replace('\\', '/')
            local_full_path = os.path.join(self.sync_info['local_path'], file_path)

            local_size = os.path.getsize(local_full_path)
            local_mtime = os.path.getmtime(local_full_path)

            try:
                remote_size = self.ftp.size(file_path)
                remote_mtime = self._get_remote_mtime(file_path)

                remote_wins = (remote_mtime > local_mtime
                               or (remote_mtime == local_mtime and remote_size > local_size))
                if remote_wins:
                    self.sync_from_server(file_path)
                else:
                    self.sync_to_server(file_path)

            except ftplib.error_perm as e:
                if not str(e).startswith('550'):
                    raise
                # "No such file" on the server: push the local copy up.
                self.sync_to_server(file_path)

        except Exception as e:
            sync_logger.error(f"时间大小比较同步失败: {file_path}, 错误: {str(e)}")
            raise

    def calculate_md5(self, file_path, chunk_size=8192):
        """计算文件的MD5值"""
        md5 = hashlib.md5()
        try:
            with open(file_path, 'rb') as f:
                for chunk in iter(lambda: f.read(chunk_size), b''):
                    md5.update(chunk)
            return md5.hexdigest()
        except Exception as e:
            sync_logger.error(f"计算文件MD5失败: {file_path}, 错误: {str(e)}")
            return None

    def get_remote_md5(self, ftp_path):
        """Return a checksum for a remote file, preferring server-side commands.

        Tries the non-standard XCRC and XMD5 extensions first; when the
        server supports neither, downloads the file to a temp location and
        hashes it locally.

        Returns:
            The checksum string (a CRC for XCRC servers, MD5 otherwise),
            or None on failure.
        """
        try:
            # Server-side CRC, if supported.
            try:
                resp = self.ftp.sendcmd(f"XCRC {ftp_path}")
                if resp.startswith('213'):
                    return resp[4:].strip()
            except Exception:
                # Extension not supported; keep trying.
                # (Was a bare `except:`, which also swallowed SystemExit/KeyboardInterrupt.)
                pass

            # Server-side MD5, if supported.
            try:
                resp = self.ftp.sendcmd(f"XMD5 {ftp_path}")
                if resp.startswith('213'):
                    return resp[4:].strip()
            except Exception:
                pass  # extension not supported; fall through to the download path

            # Fallback: download the file and hash it locally.
            temp_file = os.path.join(tempfile.gettempdir(), f"temp_{os.getpid()}_{time.time()}")
            try:
                with open(temp_file, 'wb') as f:
                    self.ftp.retrbinary(f'RETR {ftp_path}', f.write)
                return self.calculate_md5(temp_file)
            finally:
                if os.path.exists(temp_file):
                    os.remove(temp_file)

        except Exception as e:
            sync_logger.error(f"获取远程文件MD5失败: {ftp_path}, 错误: {str(e)}")
            return None

    def ensure_remote_dir(self, path):
        """Ensure a remote directory exists, creating each level as needed.

        Each path component is first probed with CWD; missing components are
        created with MKD, trying the raw name, then UTF-8 and GBK bytes
        round-tripped through latin1 (for servers that mangle non-ASCII
        names). Leaves the connection at the FTP root on success.

        Raises:
            Exception: when a component cannot be created by any encoding.
        """
        try:
            # Normalize separators and strip leading/trailing slashes.
            path = path.replace('\\', '/').strip('/')

            # Split into components.
            parts = [p for p in path.split('/') if p]
            current = ''

            # Create each level in turn.
            for part in parts:
                current = f"{current}/{part}" if current else part
                try:
                    # Probe first: CWD succeeds when the directory exists.
                    try:
                        self.ftp.cwd('/' + current)
                        continue  # already exists; move on to the next level
                    except:
                        pass

                    # Missing: try to create it.
                    try:
                        # Raw name first.
                        self.ftp.mkd('/' + current)
                        sync_logger.info(f"直接创建远程目录成功: {current}")
                    except Exception as direct_e:
                        try:
                            # Fall back to UTF-8 bytes via latin1.
                            encoded_path = current.encode('utf-8').decode('latin1')
                            self.ftp.mkd('/' + encoded_path)
                            sync_logger.info(f"使用UTF-8编码创建远程目录成功: {current}")
                        except Exception as utf8_e:
                            try:
                                # Last resort: GBK bytes via latin1.
                                encoded_path = current.encode('gbk').decode('latin1')
                                self.ftp.mkd('/' + encoded_path)
                                sync_logger.info(f"使用GBK编码创建远程目录成功: {current}")
                            except Exception as gbk_e:
                                # Every encoding failed: log details and abort.
                                sync_logger.error(f"所有编码方式创建目录都失败: {current}")
                                sync_logger.error(f"直接创建错误: {str(direct_e)}")
                                sync_logger.error(f"UTF-8编码错误: {str(utf8_e)}")
                                sync_logger.error(f"GBK编码错误: {str(gbk_e)}")
                                raise Exception(f"无法创建远程目录: {current}")

                except ftplib.error_perm as e:
                    if not str(e).startswith('550'):  # ignore "already exists"
                        raise

            # Return to the root directory.
            self.ftp.cwd('/')
            return True

        except Exception as e:
            sync_logger.error(f"创建远程目录失败: {path}, 错误: {str(e)}")
            raise

    def delete_from_server(self, file_path):
        """Delete a file from the FTP server, retrying once on connection loss.

        A 550 reply ("no such file") is treated as success. When the first
        attempt fails with what looks like a dropped connection, the cached
        FTP handle is reset and the whole operation is retried once.

        Returns:
            True when the file is gone (deleted, or already absent).

        Raises:
            Exception: when deletion also fails on the second attempt.
        """
        try:
            # Normalize separators.
            file_path = file_path.replace('\\', '/')

            for attempt in range(2):  # at most two attempts
                try:
                    # Probe existence via SIZE first.
                    try:
                        self.ftp.size(file_path)
                    except ftplib.error_perm as e:
                        if "550" in str(e):  # already absent
                            sync_logger.info(f"文件在服务器上不存在，无需删除: {file_path}")
                            return True
                        raise
                    except Exception as e:
                        # Looks like a dead connection: reset the handle and retry.
                        if 'sendall' in str(e) or 'Connection' in str(e):
                            sync_logger.warning(f"FTP连接可能断开，尝试重新连接: {str(e)}")
                            self._ftp = None
                            if attempt == 0:  # only loop again on the first try
                                continue
                        raise

                    # Delete the file.
                    sync_logger.info(f"尝试删除FTP文件: {file_path}")
                    self.ftp.delete(file_path)
                    sync_logger.info(f"文件删除成功: {file_path}")

                    # Record the deletion in the sync history.
                    self.record_sync_history(os.path.basename(file_path), 'local_to_server', 'delete')
                    return True

                except ftplib.error_perm as e:
                    if "550" not in str(e):  # anything other than "file not found"
                        sync_logger.error(f"删除文件失败: {str(e)}")
                        raise
                    return True  # already gone counts as success
                except Exception as e:
                    if attempt == 0:  # first attempt failed
                        sync_logger.warning(f"删除文件第一次尝试失败，准备重试: {str(e)}")
                        self._ftp = None  # reset the FTP connection
                        continue
                    raise  # second attempt failed: give up

        except Exception as e:
            sync_logger.error(f"删除文件失败: {str(e)}")
            raise

    def handle_conflict(self, file_path):
        """Resolve an edit conflict by keeping both copies.

        The local file is renamed aside with a `_local_<timestamp>` suffix,
        the server version is downloaded in its place, and a conflict entry
        is written to the sync history.
        """
        local_full_path = os.path.join(self.sync_info['local_path'], file_path)

        try:
            stem, ext = os.path.splitext(local_full_path)
            stamp = time.strftime("%Y%m%d_%H%M%S")
            renamed = f"{stem}_local_{stamp}{ext}"

            sync_logger.info(f"检测到文件冲突: {file_path}")
            sync_logger.info(f"重命名本地文件为: {renamed}")

            os.rename(local_full_path, renamed)

            # Pull down the authoritative server copy.
            self.sync_from_server(file_path)

            # Log the conflict in the history table.
            self.record_sync_history(file_path, 'conflict', 'modify')

        except Exception as e:
            sync_logger.error(f"处理文件冲突失败: {str(e)}")
            raise

    def record_sync_history(self, filename, direction, operation):
        """Insert a sync_history row and broadcast it over WebSocket.

        Args:
            filename: file name recorded in the history (also stored as path).
            direction: 'local_to_server', 'server_to_local', or 'conflict'.
            operation: 'create', 'modify', or 'delete'.

        Raises:
            Exception: when the insert or the WebSocket emit fails.
        """
        try:
            with db_manager.get_connection() as db:
                # Insert the history record.
                current_time = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
                db.execute(
                    'INSERT INTO sync_history (sync_dir_id, filename, direction, operation, path, timestamp) VALUES (?, ?, ?, ?, ?, datetime(?))',
                    (self.sync_dir_id, filename, direction, operation, filename, current_time)
                )
                db.commit()

                # Fetch the row just inserted, joined with its sync_dirs info.
                history_item = db.execute('''
                                          SELECT h.*, d.local_path, d.ftp_host, d.ftp_path
                                          FROM sync_history h
                                                   JOIN sync_dirs d ON h.sync_dir_id = d.id
                                          WHERE h.sync_dir_id = ?
                                          ORDER BY h.id DESC LIMIT 1
                                          ''', (self.sync_dir_id,)).fetchone()

                # Push the update to listeners over WebSocket.
                if history_item:
                    sync_logger.info(f"准备发送同步历史更新: dir_id={self.sync_dir_id}, operation={operation}")
                    websocket_manager.emit_sync_history(
                        self.sync_dir_id,
                        dict(history_item)
                    )
                    sync_logger.info(f"同步历史更新发送成功")

        except Exception as e:
            sync_logger.error(f"记录同步历史失败: {str(e)}")
            raise

    def delete_remote_dir(self, path):
        """Recursively delete a remote directory and everything inside it.

        Missing directories (550 replies) are treated as already deleted.

        Raises:
            Exception: on any non-550 FTP failure.
        """
        try:
            # Normalize separators and strip leading/trailing slashes.
            path = path.replace('\\', '/').strip('/')

            # Verify the directory exists (probe with CWD, then restore cwd).
            try:
                original_dir = self.ftp.pwd()
                self.ftp.cwd('/' + path if path else '/')
                self.ftp.cwd(original_dir)
            except ftplib.error_perm as e:
                if "550" in str(e):  # directory does not exist
                    sync_logger.warning(f"FTP目录不存在，无需删除: {path}")
                    return
                raise

            # First delete every file and subdirectory inside it.
            try:
                self._delete_remote_dir_contents(path)
            except ftplib.error_perm as e:
                if "550" in str(e):  # contents may already be gone
                    sync_logger.warning(f"目录内容可能已被删除: {path}")
                else:
                    raise

            # Then remove the directory itself.
            try:
                if path:  # never try to delete the root directory
                    self.ftp.rmd('/' + path)
                    sync_logger.info(f"删除FTP目录成功: {path}")
                    # Record the deletion in the sync history.
                    self.record_sync_history(os.path.basename(path), 'local_to_server', 'delete')
            except ftplib.error_perm as e:
                if "550" in str(e):  # already removed
                    sync_logger.warning(f"FTP目录已不存在: {path}")
                else:
                    raise

        except Exception as e:
            sync_logger.error(f"删除FTP目录失败: {path}, 错误: {str(e)}")
            raise

    def _delete_remote_dir_contents(self, path):
        """Recursively delete every file and subdirectory under a remote path.

        Parses the raw LIST output to distinguish files from directories.
        Lines containing '<DIR>' are treated as Windows/IIS-style listings;
        anything else is parsed as Unix `ls -l` style (9 whitespace-separated
        fields, type flag in column one) -- assumes the server emits one of
        those two formats. 550 replies are ignored throughout (the item was
        already gone).
        """
        try:
            # Remember where we are so we can restore it afterwards.
            original_dir = self.ftp.pwd()

            # Move into the target directory.
            try:
                self.ftp.cwd('/' + path if path else '/')
            except ftplib.error_perm as e:
                if "550" in str(e):  # directory does not exist
                    return
                raise

            # Collect the raw listing lines.
            items = []
            try:
                self.ftp.dir(lambda x: items.append(x))
            except ftplib.error_perm as e:
                if "550" in str(e):  # directory may already be gone
                    return
                raise

            for item in items:
                try:
                    # Parse one listing line.
                    if '<DIR>' in item:  # Windows-style listing
                        parts = item.split()
                        name = ' '.join(parts[3:])  # name starts at the 4th field
                        is_dir = True
                    else:  # Unix-style listing
                        parts = item.split(None, 8)
                        if len(parts) < 9:
                            continue
                        name = parts[8]
                        is_dir = item.startswith('d')

                    # Skip the . and .. entries.
                    if name in ('.', '..'):
                        continue

                    # Build the full remote path of this entry.
                    item_path = f"{path}/{name}".lstrip('/')

                    if is_dir:
                        # Recurse into the subdirectory first.
                        self._delete_remote_dir_contents(item_path)
                        try:
                            self.ftp.rmd('/' + item_path)
                            sync_logger.info(f"删除FTP子目录成功: {item_path}")
                            # Record the deletion in the sync history.
                            self.record_sync_history(os.path.basename(item_path), 'local_to_server', 'delete')
                        except ftplib.error_perm as e:
                            if "550" not in str(e):  # ignore "directory not found"
                                raise
                    else:
                        # Plain file: delete it.
                        try:
                            self.ftp.delete('/' + item_path)
                            sync_logger.info(f"删除FTP文件成功: {item_path}")
                            # Record the deletion in the sync history.
                            self.record_sync_history(os.path.basename(item_path), 'local_to_server', 'delete')
                        except ftplib.error_perm as e:
                            if "550" not in str(e):  # ignore "file not found"
                                raise

                except Exception as e:
                    sync_logger.error(f"处理目录项失败: {item}, 错误: {str(e)}")
                    raise

            # Restore the original working directory.
            try:
                self.ftp.cwd(original_dir)
            except ftplib.error_perm:
                # If it no longer exists, fall back to the root.
                self.ftp.cwd('/')

        except Exception as e:
            sync_logger.error(f"删除目录内容失败: {path}, 错误: {str(e)}")
            raise

    def rename_remote_dir(self, old_path, new_path):
        """Rename a remote directory, with fallbacks for encoding problems.

        Strategy, in order:
          1. Plain FTP RENAME with the paths as given.
          2. If step 1 failed with a Unicode-mapping error (seen with Chinese
             path names), retry with the path bytes re-encoded GBK->latin1.
          3. If renaming is impossible, emulate it by recursively copying the
             directory contents to the new path and deleting the old tree.

        Args:
            old_path: current directory path on the FTP server.
            new_path: desired directory path on the FTP server.

        Raises:
            Exception: re-raised after logging if every strategy fails.
        """
        try:
            # Normalize to forward slashes for FTP.
            old_path = old_path.replace('\\', '/')
            new_path = new_path.replace('\\', '/')

            sync_logger.info(f"准备重命名FTP目录: {old_path} -> {new_path}")

            # Make sure the parent of the destination exists.
            parent_dir = os.path.dirname(new_path)
            if parent_dir:
                self.ensure_remote_dir(parent_dir)

            try:
                # Attempt a direct rename first.
                self.ftp.rename(old_path, new_path)
                sync_logger.info(f"重命名FTP目录成功: {old_path} -> {new_path}")
                return
            except Exception as e:
                # Only the Unicode-mapping failure falls through to the GBK
                # retry below; anything else is a genuine error.
                if "No mapping for the Unicode character" in str(e):
                    sync_logger.info("检测到中文路径，尝试使用GBK编码")
                else:
                    raise

            # Retry with GBK-encoded paths (bytes smuggled through latin1).
            try:
                old_path_encoded = old_path
                new_path_encoded = new_path

                # Re-encode only paths that actually contain CJK characters.
                if any('\u4e00' <= c <= '\u9fff' for c in old_path):
                    old_path_encoded = old_path.encode('gbk').decode('latin1')
                if any('\u4e00' <= c <= '\u9fff' for c in new_path):
                    new_path_encoded = new_path.encode('gbk').decode('latin1')

                sync_logger.info(f"使用GBK编码重命名: {old_path_encoded} -> {new_path_encoded}")
                self.ftp.rename(old_path_encoded, new_path_encoded)
                sync_logger.info(f"使用GBK编码重命名成功")
                return
            except Exception as e:
                sync_logger.error(f"GBK编码重命名失败: {str(e)}")

                # Last resort: emulate the rename via copy + delete.
                sync_logger.info(f"尝试通过删除和创建来实现重命名")
                try:
                    # Recursively copy the directory contents to the new path.
                    self._copy_remote_dir_contents(old_path, new_path)
                    # Then remove the old directory tree.
                    self.delete_remote_dir(old_path)
                    sync_logger.info(f"通过复制和删除完成目录重命名")
                    return
                except Exception as copy_error:
                    sync_logger.error(f"复制目录内容失败: {str(copy_error)}")
                    raise

        except Exception as e:
            sync_logger.error(f"重命名FTP目录失败: {old_path} -> {new_path}, 错误: {str(e)}")
            raise

    def _copy_remote_dir_contents(self, src_path, dst_path):
        """Recursively copy the contents of a remote directory to another remote path.

        Listing entries are parsed for both Windows-style ('<DIR>' marker) and
        Unix-style ('d' type flag) FTP servers. Files are copied one by one via
        _copy_remote_file; subdirectories are handled recursively. Paths with
        Chinese characters are retried with GBK->latin1 re-encoding, matching
        the rest of this class.

        Args:
            src_path: source directory path on the FTP server.
            dst_path: destination directory path on the FTP server.

        Raises:
            Exception: re-raised after logging on any listing/parsing/copy failure.
        """
        try:
            # Make sure the destination directory exists.
            self.ensure_remote_dir(dst_path)

            # Remember where we were so we can restore the working directory.
            original_dir = self.ftp.pwd()

            # Change into the source directory (GBK re-encoding for CJK paths).
            try:
                if any('\u4e00' <= c <= '\u9fff' for c in src_path):
                    src_path_encoded = src_path.encode('gbk').decode('latin1')
                    self.ftp.cwd('/' + src_path_encoded)
                else:
                    self.ftp.cwd('/' + src_path)
            except Exception:  # was a bare except; behavior kept, scope narrowed
                sync_logger.error(f"无法访问源目录: {src_path}")
                raise

            # Collect the raw directory listing lines.
            items = []
            self.ftp.dir(lambda x: items.append(x))

            for item in items:
                # `name` may never get bound if parsing of `item` itself fails,
                # so pre-bind it for the error log in the handler below.
                name = None
                try:
                    # Parse one listing line.
                    if '<DIR>' in item:  # Windows-style listing
                        parts = item.split()
                        name = ' '.join(parts[3:])  # directory name follows the 4th field
                        is_dir = True
                    else:  # Unix-style listing
                        parts = item.split(None, 8)
                        if len(parts) < 9:
                            continue
                        name = parts[8]
                        is_dir = item.startswith('d')

                    # Skip the . and .. pseudo entries.
                    if name in ('.', '..'):
                        continue

                    # Build source and destination paths for this entry.
                    src_item_path = f"{src_path}/{name}".lstrip('/')
                    dst_item_path = f"{dst_path}/{name}".lstrip('/')

                    if is_dir:
                        # Recurse into the subdirectory.
                        self._copy_remote_dir_contents(src_item_path, dst_item_path)
                    else:
                        # Copy a single file.
                        self._copy_remote_file(src_item_path, dst_item_path)

                except Exception as e:
                    # Fall back to the raw listing line when the name was never
                    # parsed (previously this raised NameError inside the handler).
                    sync_logger.error(f"复制项目失败: {name if name is not None else item}, 错误: {str(e)}")
                    raise

            # Restore the original working directory.
            self.ftp.cwd(original_dir)

        except Exception as e:
            sync_logger.error(f"复制目录内容失败: {src_path} -> {dst_path}, 错误: {str(e)}")
            raise

    def _copy_remote_file(self, src_path, dst_path):
        """Copy a remote file to a new remote path via a local temp file.

        FTP has no server-side copy command, so the file is downloaded to a
        local temporary file and re-uploaded to the destination. Paths that
        contain Chinese characters are re-encoded GBK->latin1, matching the
        encoding workaround used elsewhere in this class.

        Args:
            src_path: source file path on the FTP server.
            dst_path: destination file path on the FTP server.

        Raises:
            Exception: re-raised after logging if download or upload fails.
        """
        try:
            # Create (and immediately close) a named temp file; only its path
            # is needed so it can be reopened for the download/upload below.
            with tempfile.NamedTemporaryFile(delete=False) as temp_file:
                temp_path = temp_file.name

            try:
                # Download the source file into the temp file.
                with open(temp_path, 'wb') as f:
                    if any('\u4e00' <= c <= '\u9fff' for c in src_path):
                        src_path_encoded = src_path.encode('gbk').decode('latin1')
                        self.ftp.retrbinary(f'RETR {src_path_encoded}', f.write)
                    else:
                        self.ftp.retrbinary(f'RETR {src_path}', f.write)

                # Upload it to the new location.
                with open(temp_path, 'rb') as f:
                    if any('\u4e00' <= c <= '\u9fff' for c in dst_path):
                        dst_path_encoded = dst_path.encode('gbk').decode('latin1')
                        self.ftp.storbinary(f'STOR {dst_path_encoded}', f)
                    else:
                        self.ftp.storbinary(f'STOR {dst_path}', f)

            finally:
                # Best-effort cleanup of the temp file; the former bare except
                # is narrowed so non-OS failures (e.g. KeyboardInterrupt)
                # propagate instead of being swallowed.
                try:
                    os.unlink(temp_path)
                except OSError:
                    pass

        except Exception as e:
            sync_logger.error(f"复制文件失败: {src_path} -> {dst_path}, 错误: {str(e)}")
            raise

    def move_on_server(self, src_path, dst_path):
        """Move (rename) a file on the FTP server.

        If the source is missing server-side but exists locally, it is
        uploaded first. The rename is then attempted with several path
        encodings (raw, UTF-8->latin1, GBK->latin1); if all fail, the move is
        emulated by copying the file and deleting the original.

        Args:
            src_path: source path relative to the sync root (may use '\\').
            dst_path: destination path relative to the sync root.

        Raises:
            Exception: re-raised after logging when the file cannot be moved.
        """
        try:
            # Make sure the destination directory exists on the server.
            dst_dir = os.path.dirname(dst_path)
            if dst_dir:
                self.ensure_remote_dir(dst_dir)

            # Normalize to forward slashes for FTP.
            src_ftp_path = src_path.replace('\\', '/')
            dst_ftp_path = dst_path.replace('\\', '/')

            # Probe the source with SIZE to see whether it exists server-side.
            try:
                self.ftp.size(src_ftp_path)
            except ftplib.error_perm as e:
                if "550" in str(e):  # 550 == file not found on the server
                    # Upload from the local copy if we have one.
                    local_path = os.path.join(self.sync_info['local_path'], src_path)
                    if os.path.exists(local_path):
                        sync_logger.info(f"源文件在服务器上不存在，先上传文件: {src_ftp_path}")
                        self.sync_to_server(src_path)
                    else:
                        raise Exception(f"源文件在本地和服务器上都不存在: {src_path}")
                # NOTE(review): a non-550 error_perm is swallowed here and the
                # rename below is attempted anyway — presumably deliberate
                # best-effort, since the rename will surface any real problem;
                # confirm this is intended.

            # Try the rename with progressively different path encodings.
            try:
                # 1) Direct rename with the paths as-is.
                self.ftp.rename(src_ftp_path, dst_ftp_path)
                sync_logger.info(f"直接重命名成功: {src_ftp_path} -> {dst_ftp_path}")
            except Exception as direct_e:
                try:
                    # 2) UTF-8 bytes smuggled through latin1.
                    src_encoded = src_ftp_path.encode('utf-8').decode('latin1')
                    dst_encoded = dst_ftp_path.encode('utf-8').decode('latin1')
                    self.ftp.rename(src_encoded, dst_encoded)
                    sync_logger.info(f"使用UTF-8编码重命名成功: {src_ftp_path} -> {dst_ftp_path}")
                except Exception as utf8_e:
                    try:
                        # 3) GBK bytes smuggled through latin1.
                        src_encoded = src_ftp_path.encode('gbk').decode('latin1')
                        dst_encoded = dst_ftp_path.encode('gbk').decode('latin1')
                        self.ftp.rename(src_encoded, dst_encoded)
                        sync_logger.info(f"使用GBK编码重命名成功: {src_ftp_path} -> {dst_ftp_path}")
                    except Exception as gbk_e:
                        # All rename attempts failed: log every cause, then
                        # fall back to copy + delete.
                        sync_logger.error(f"所有重命名方式都失败，尝试通过复制和删除来实现")
                        sync_logger.error(f"直接重命名错误: {str(direct_e)}")
                        sync_logger.error(f"UTF-8编码错误: {str(utf8_e)}")
                        sync_logger.error(f"GBK编码错误: {str(gbk_e)}")

                        # 4) Emulate the move by copying then deleting.
                        try:
                            self._copy_remote_file(src_ftp_path, dst_ftp_path)
                            self.delete_from_server(src_ftp_path)
                            sync_logger.info(f"通过复制和删除完成移动: {src_ftp_path} -> {dst_ftp_path}")
                        except Exception as copy_e:
                            sync_logger.error(f"复制和删除方式也失败: {str(copy_e)}")
                            raise Exception(f"无法移动文件: {src_ftp_path} -> {dst_ftp_path}")

            # Record the move in the sync history.
            self.record_sync_history(f"{src_path} -> {dst_path}", 'local_to_server', 'modify')
            sync_logger.info(f"文件移动成功: {src_ftp_path} -> {dst_ftp_path}")

        except Exception as e:
            sync_logger.error(f"移动文件失败: {str(e)}")
            raise

    def _get_all_files_in_dir(self, ftp_path, result_dict):
        """Recursively collect every file/directory under ftp_path via NLST.

        Entries are accumulated into result_dict (side effect), keyed by path
        relative to the sync root, each mapped to
        {'type': 'file'|'dir', 'size': int}. A CWD probe decides whether an
        entry is a directory; paths containing Chinese characters are retried
        with GBK->latin1 re-encoding, mirroring the rest of this class.

        Args:
            ftp_path: remote directory to scan ('' for the root).
            result_dict: dict mutated in place with the discovered entries.

        Returns:
            An empty dict; results are delivered via result_dict. (The old
            implementation returned an always-empty unused local variable,
            which has been removed.)
        """
        try:
            # Fetch the NLST listing of the current directory.
            file_list = []
            try:
                if any('\u4e00' <= c <= '\u9fff' for c in ftp_path):  # path contains Chinese chars
                    try:
                        # Try the GBK-encoded path first.
                        path_encoded = ftp_path.encode('gbk').decode('latin1')
                        file_list = self.ftp.nlst(path_encoded)
                        sync_logger.info(f"使用GBK编码成功获取目录 {ftp_path} 的文件列表")
                    except Exception as e:
                        sync_logger.error(f"GBK编码获取目录失败: {ftp_path}, 尝试直接获取: {str(e)}")
                        try:
                            file_list = self.ftp.nlst(ftp_path)
                        except Exception as e2:
                            sync_logger.error(f"直接获取目录列表也失败: {ftp_path}, 错误: {str(e2)}")
                            return {}
                else:
                    try:
                        file_list = self.ftp.nlst(ftp_path)
                    except Exception as e:
                        sync_logger.error(f"获取目录列表失败: {ftp_path}, 错误: {str(e)}")
                        return {}

                sync_logger.info(f"目录 {ftp_path} 中找到 {len(file_list)} 个项目")
            except Exception as e:
                sync_logger.error(f"获取目录列表过程中发生错误: {ftp_path}, 错误: {str(e)}")
                return {}

            # Examine each entry returned by NLST.
            for item in file_list:
                # Skip the current/parent pseudo entries.
                if item in ['.', '..'] or os.path.basename(item) in ['.', '..']:
                    continue

                # Build the path relative to the sync root (NLST may return
                # full paths, so only the basename is appended).
                if ftp_path:
                    rel_path = f"{ftp_path}/{os.path.basename(item)}".replace('//', '/')
                else:
                    rel_path = os.path.basename(item)

                # Probe with CWD to find out whether the entry is a directory.
                is_dir = False
                try:
                    current_pwd = self.ftp.pwd()

                    if any('\u4e00' <= c <= '\u9fff' for c in rel_path):
                        # Chinese path: try a GBK-encoded CWD first.
                        try:
                            rel_path_encoded = rel_path.encode('gbk').decode('latin1')
                            self.ftp.cwd(rel_path_encoded)
                            is_dir = True
                            self.ftp.cwd(current_pwd)
                        except Exception:
                            # Fall back to the raw path.
                            try:
                                self.ftp.cwd(rel_path)
                                is_dir = True
                                self.ftp.cwd(current_pwd)
                            except Exception:
                                # CWD failed both ways: not a directory.
                                pass
                    else:
                        # Non-Chinese path: single CWD probe.
                        try:
                            self.ftp.cwd(rel_path)
                            is_dir = True
                            self.ftp.cwd(current_pwd)
                        except Exception:
                            # CWD failed: not a directory.
                            pass
                except Exception as e:
                    sync_logger.debug(f"检查是否为目录时出错: {rel_path}, 错误: {str(e)}")
                    # Assume it is a plain file.
                    is_dir = False

                if is_dir:
                    # Record the directory, then recurse into it.
                    result_dict[rel_path] = {'type': 'dir', 'size': 0}
                    sync_logger.debug(f"发现远程目录: {rel_path}")
                    self._get_all_files_in_dir(rel_path, result_dict)
                else:
                    # Plain file: fetch its size (0 when SIZE is unsupported).
                    size = 0
                    try:
                        if any('\u4e00' <= c <= '\u9fff' for c in rel_path):
                            try:
                                # Try the GBK-encoded path first.
                                rel_path_encoded = rel_path.encode('gbk').decode('latin1')
                                size = self.ftp.size(rel_path_encoded)
                            except Exception:
                                # Fall back to the raw path.
                                try:
                                    size = self.ftp.size(rel_path)
                                except Exception:
                                    sync_logger.debug(f"无法获取文件大小: {rel_path}")
                        else:
                            try:
                                size = self.ftp.size(rel_path)
                            except Exception:
                                sync_logger.debug(f"无法获取文件大小: {rel_path}")
                    except Exception as e:
                        sync_logger.debug(f"获取文件大小时出错: {rel_path}, 错误: {str(e)}")

                    # Record the file.
                    result_dict[rel_path] = {'type': 'file', 'size': size}
                    sync_logger.debug(f"发现远程文件: {rel_path}, 大小: {size}")

            # Results were delivered through result_dict; keep the historical
            # empty-dict return value for any caller that inspects it.
            return {}
        except Exception as e:
            sync_logger.error(f"递归获取文件信息失败: {str(e)}")
            return {}

    def check_server_changes(self, force_sync=False):
        """Detect server-side changes and reconcile them per the sync strategy.

        'local_priority' pushes local state to the server, 'server_priority'
        pulls server state down, and any other strategy ('time_size') runs
        both directions, server-to-local first.

        Args:
            force_sync: when True, sync every file regardless of size comparison.

        Raises:
            Exception: re-raised after logging if a sync pass fails.
        """
        try:
            sync_logger.info(f"开始检查服务器变化 (强制同步: {force_sync})")

            strategy = self.sync_info['sync_strategy']

            # Pick the sync pass(es) to run for the configured strategy.
            if strategy == 'local_priority':
                sync_logger.info("检测到本地优先策略，执行本地到服务器的同步")
                passes = (self._sync_local_to_server,)
            elif strategy == 'server_priority':
                sync_logger.info("检测到服务器优先策略，执行服务器到本地的同步")
                passes = (self._sync_server_to_local,)
            else:  # time_size strategy: bidirectional
                sync_logger.info("检测到时间大小比较策略，执行双向同步")
                passes = (self._sync_server_to_local, self._sync_local_to_server)

            for run_pass in passes:
                run_pass(force_sync)

        except Exception as e:
            sync_logger.error(f"检查服务器变化失败: {str(e)}")
            raise

    def _sync_local_to_server(self, force_sync=False):
        """Mirror the local directory onto the FTP server.

        Uploads files that are missing on the server or differ in size, and
        deletes server-side entries that no longer exist locally. The server
        listing is assembled from three sources (LIST walker, NLST walker,
        raw root NLST) because no single method is reliable on all servers.

        Args:
            force_sync: when True, upload every file regardless of size comparison.

        Raises:
            Exception: re-raised after logging if the overall sync fails.
        """
        try:
            # Walk the local tree and record every dir/file with its size.
            local_files = {}
            local_path = self.sync_info['local_path']
            for root, dirs, files in os.walk(local_path):
                rel_root = os.path.relpath(root, local_path).replace('\\', '/')
                if rel_root == '.':
                    rel_root = ''

                # Record the directory itself (the sync root is implicit).
                if rel_root:
                    local_files[rel_root] = {'type': 'dir', 'size': 0}

                # Record each file with its size.
                for file in files:
                    rel_path = os.path.join(rel_root, file).replace('\\', '/')
                    try:
                        size = os.path.getsize(os.path.join(root, file))
                        local_files[rel_path] = {'type': 'file', 'size': size}
                    except OSError:
                        # File vanished between walk and stat; skip it.
                        continue

            # Server listing via the standard LIST-based walker...
            server_files = {}
            self._list_remote_files('', server_files)

            # ...supplemented by the NLST-based walker.
            backup_server_files = {}
            self._get_all_files_in_dir('', backup_server_files)

            # Merge anything the standard walker missed.
            for path, info in backup_server_files.items():
                if path not in server_files:
                    server_files[path] = info

            # Final fallback: raw NLST of the root directory.
            try:
                root_files = []
                self.ftp.retrlines('NLST', root_files.append)

                # Hoisted out of the loop: previously this basename list was
                # rebuilt for every root entry (O(n^2)).
                known_basenames = {os.path.basename(path) for path in server_files}

                for filename in root_files:
                    if filename in ['.', '..']:
                        continue

                    if filename not in known_basenames:
                        try:
                            size = self.ftp.size(filename)
                            if size is not None:
                                server_files[filename] = {'type': 'file', 'size': size}
                        except Exception:
                            # SIZE failed: probe with CWD to detect a directory.
                            try:
                                original_dir = self.ftp.pwd()
                                self.ftp.cwd(filename)
                                self.ftp.cwd(original_dir)
                                server_files[filename] = {'type': 'dir', 'size': 0}
                            except Exception:
                                server_files[filename] = {'type': 'file', 'size': 0}
            except Exception:
                # The root NLST pass is purely best-effort.
                pass

            sync_logger.info(f"找到 {len(local_files)} 个本地文件，{len(server_files)} 个服务器文件")

            # Log both listings for debugging.
            if local_files:
                sync_logger.info(f"本地文件列表: {', '.join(local_files.keys())}")
            if server_files:
                sync_logger.info(f"服务器文件列表: {', '.join(server_files.keys())}")

            # Upload entries that are missing or differ on the server.
            for path, info in local_files.items():
                try:
                    file_exists_on_server = path in server_files

                    # Compare sizes for files that exist on both sides.
                    size_different = False
                    if file_exists_on_server and info['type'] == 'file':
                        local_size = info['size']
                        server_size = server_files[path]['size']
                        size_different = local_size != server_size
                        if size_different:
                            sync_logger.info(f"文件大小不同: {path} (本地: {local_size}, 服务器: {server_size})")

                    if not file_exists_on_server or size_different or force_sync:
                        sync_logger.info(
                            f"上传到服务器: {path} (强制同步: {force_sync}, 文件存在: {file_exists_on_server}, 大小不同: {size_different})")
                        if info['type'] == 'dir':
                            self.ensure_remote_dir(path)
                        else:
                            # New files are 'create', overwrites are 'modify'.
                            operation = 'create' if not file_exists_on_server else 'modify'
                            sync_logger.info(f"文件{'不存在' if not file_exists_on_server else '存在'}于服务器，使用{operation}操作")
                            self.sync_to_server(path, operation)
                except Exception as e:
                    sync_logger.error(f"同步本地文件失败: {path}, 错误: {str(e)}")

            # Delete server entries that have no local counterpart.
            for path, info in server_files.items():
                if path not in local_files:
                    try:
                        sync_logger.info(f"删除服务器多余文件/目录: {path}")
                        if info['type'] == 'dir':
                            try:
                                self.delete_remote_dir(path)
                                # NOTE(review): direction is recorded as
                                # 'server_to_local' although the deletion is
                                # driven by local state (other remote-delete
                                # paths use 'local_to_server') — confirm intent.
                                self.record_sync_history(os.path.basename(path), 'server_to_local', 'delete')
                            except Exception as e:
                                if "550" in str(e):  # directory already gone
                                    sync_logger.info(f"目录已不存在: {path}")
                                else:
                                    sync_logger.error(f"删除目录失败: {path}, 错误: {str(e)}")
                        else:
                            try:
                                self.delete_from_server(path)
                                self.record_sync_history(os.path.basename(path), 'server_to_local', 'delete')
                            except Exception as e:
                                if "550" in str(e):  # file already gone
                                    sync_logger.info(f"文件已不存在: {path}")
                                else:
                                    sync_logger.error(f"删除文件失败: {path}, 错误: {str(e)}")
                    except Exception as e:
                        sync_logger.error(f"处理服务器多余文件/目录失败: {path}, 错误: {str(e)}")
        except Exception as e:
            sync_logger.error(f"本地到服务器同步失败: {str(e)}")
            raise

    def _sync_server_to_local(self, force_sync=False):
        """Mirror the FTP server's content into the local directory.

        Downloads files that are missing locally or differ in size. Entries
        that exist only locally are handled per the strategy:
        'server_priority' deletes them, 'time_size' uploads them instead.

        Args:
            force_sync: when True, download every file regardless of size comparison.

        Raises:
            Exception: re-raised after logging if the overall sync fails.
        """
        try:
            # Walk the local tree and record every dir/file with its size.
            local_files = {}
            local_path = self.sync_info['local_path']
            for root, dirs, files in os.walk(local_path):
                rel_root = os.path.relpath(root, local_path).replace('\\', '/')
                if rel_root == '.':
                    rel_root = ''

                # Record the directory itself (the sync root is implicit).
                if rel_root:
                    local_files[rel_root] = {'type': 'dir', 'size': 0}

                # Record each file with its size.
                for file in files:
                    rel_path = os.path.join(rel_root, file).replace('\\', '/')
                    try:
                        size = os.path.getsize(os.path.join(root, file))
                        local_files[rel_path] = {'type': 'file', 'size': size}
                    except OSError:
                        # File vanished between walk and stat; skip it.
                        continue

            # Server listing: LIST-based walker first.
            server_files = {}
            self._list_remote_files('', server_files)
            standard_file_count = len(server_files)
            sync_logger.info(f"标准方法找到 {standard_file_count} 个服务器文件")

            if server_files:
                sync_logger.info(f"标准方法找到的文件: {', '.join(server_files.keys())}")

            # Always run the NLST-based walker as cross-check and supplement.
            backup_server_files = {}
            self._get_all_files_in_dir('', backup_server_files)
            backup_file_count = len(backup_server_files)
            sync_logger.info(f"备用方法找到 {backup_file_count} 个服务器文件")

            if backup_server_files:
                sync_logger.info(f"备用方法找到的文件: {', '.join(backup_server_files.keys())}")

            # Prefer whichever listing found more entries.
            if backup_file_count > standard_file_count:
                sync_logger.info(
                    f"备用方法找到了更多文件: {backup_file_count} > {standard_file_count}，使用备用方法结果")
                server_files = backup_server_files

            # Merge in anything the chosen listing is still missing.
            for path, info in backup_server_files.items():
                if path not in server_files:
                    server_files[path] = info
                    sync_logger.info(f"从备用方法添加文件: {path}")

            # Final fallback: raw NLST of the root directory (catches files
            # both walkers missed).
            try:
                root_files = []
                self.ftp.retrlines('NLST', root_files.append)
                sync_logger.info(f"根目录NLST命令找到 {len(root_files)} 个项目: {root_files}")

                # Hoisted out of the loop: previously this basename list was
                # rebuilt for every root entry (O(n^2)).
                known_basenames = {os.path.basename(path) for path in server_files}

                for filename in root_files:
                    if filename in ['.', '..']:
                        continue

                    if filename not in known_basenames:
                        try:
                            # Try SIZE first; success implies a plain file.
                            size = self.ftp.size(filename)
                            if size is not None:
                                server_files[filename] = {'type': 'file', 'size': size}
                                # Fixed: the log previously omitted the filename.
                                sync_logger.info(f"从NLST命令添加文件: {filename}, 大小: {size}")
                        except Exception:
                            # SIZE failed: probe with CWD to detect a directory.
                            try:
                                original_dir = self.ftp.pwd()
                                self.ftp.cwd(filename)
                                self.ftp.cwd(original_dir)
                                if filename not in server_files:
                                    server_files[filename] = {'type': 'dir', 'size': 0}
                                    sync_logger.info(f"从NLST命令添加目录: {filename}")
                            except Exception:
                                # Unknown type; assume a plain file.
                                server_files[filename] = {'type': 'file', 'size': 0}
                                sync_logger.info(f"从NLST命令添加未知类型项目: {filename}")
            except Exception as e:
                sync_logger.error(f"使用NLST获取根目录列表失败: {str(e)}")

            sync_logger.info(f"找到 {len(local_files)} 个本地文件，{len(server_files)} 个服务器文件")

            # Log both listings for debugging.
            if local_files:
                sync_logger.info(f"本地文件列表: {', '.join(local_files.keys())}")
            if server_files:
                sync_logger.info(f"服务器文件列表: {', '.join(server_files.keys())}")

            strategy = self.sync_info['sync_strategy']

            # Download entries that are missing locally or differ in size.
            for path, info in server_files.items():
                try:
                    local_full_path = os.path.join(local_path, path)

                    file_exists_locally = path in local_files

                    # Compare sizes for files that exist on both sides.
                    size_different = False
                    if file_exists_locally and info['type'] == 'file':
                        server_size = info['size']
                        local_size = local_files[path]['size']
                        size_different = server_size != local_size
                        if size_different:
                            sync_logger.info(f"文件大小不同: {path} (本地: {local_size}, 服务器: {server_size})")

                    if (strategy == 'server_priority' or
                            not file_exists_locally or
                            size_different or
                            force_sync):

                        sync_logger.info(
                            f"从服务器同步: {path} (强制同步: {force_sync}, 文件存在: {file_exists_locally}, 大小不同: {size_different})")
                        if info['type'] == 'dir':
                            # exist_ok makes this idempotent. (The old code
                            # re-checked os.path.exists right after makedirs,
                            # which could never be false — dead branch removed.)
                            os.makedirs(local_full_path, exist_ok=True)
                        else:
                            try:
                                self.sync_from_server(path)
                            except Exception as e:
                                sync_logger.error(f"从服务器同步文件失败: {path}, 错误: {str(e)}")
                except Exception as e:
                    sync_logger.error(f"同步服务器文件失败: {path}, 错误: {str(e)}")

            # Handle entries that exist only locally.
            if strategy == 'server_priority':
                # Server wins: delete local extras, files BEFORE directories so
                # directories can empty out first. (Fixed: the previous
                # reverse=True sort processed directories first, so non-empty
                # directories could never be removed.)
                for path, info in sorted(local_files.items(),
                                         key=lambda x: 0 if x[1]['type'] == 'file' else 1):
                    if path not in server_files:
                        try:
                            full_path = os.path.join(local_path, path)
                            sync_logger.info(f"删除本地多余文件/目录: {path}")
                            if info['type'] == 'dir':
                                # Only remove directories once they are empty.
                                if not os.listdir(full_path):
                                    try:
                                        os.rmdir(full_path)
                                        self.record_sync_history(os.path.basename(path), 'server_to_local', 'delete')
                                    except Exception as e:
                                        sync_logger.error(f"删除空目录失败: {path}, 错误: {str(e)}")
                            else:
                                try:
                                    os.remove(full_path)
                                    self.record_sync_history(os.path.basename(path), 'server_to_local', 'delete')
                                except Exception as e:
                                    sync_logger.error(f"删除文件失败: {path}, 错误: {str(e)}")
                        except Exception as e:
                            sync_logger.error(f"处理本地多余文件/目录失败: {path}, 错误: {str(e)}")
            elif strategy == 'time_size':
                # time_size strategy: push local extras up to the server.
                for path, info in local_files.items():
                    if path not in server_files:
                        try:
                            sync_logger.info(f"上传本地文件到服务器: {path}")
                            if info['type'] == 'dir':
                                self.ensure_remote_dir(path)
                            else:
                                self.sync_to_server(path)
                        except Exception as e:
                            sync_logger.error(f"处理本地文件失败: {path}, 错误: {str(e)}")
        except Exception as e:
            sync_logger.error(f"服务器到本地同步失败: {str(e)}")
            raise

    def _list_remote_files(self, current_path, result):
        """Recursively list the remote FTP tree rooted at *current_path*.

        Populates *result* in place, mapping each remote path (relative,
        '/'-separated, no leading slash) to ``{'type': 'file'|'dir', 'size': int}``.
        Handles both Windows/IIS-style listings (``<DIR>`` marker) and
        Unix-style ``ls -l`` listings, with an NLST fallback when DIR yields
        nothing usable.  Errors are logged and the affected subtree is skipped
        rather than aborting the whole walk; only a top-level failure re-raises.
        """
        try:
            # Remember where we are so the CWD can be restored afterwards.
            original_dir = self.ftp.pwd()

            # Change into the target directory.
            if current_path:
                try:
                    # Workaround for CJK path names: some FTP servers expect
                    # GBK-encoded path bytes on the wire.
                    if any('\u4e00' <= c <= '\u9fff' for c in current_path):  # detect CJK characters
                        try:
                            # Re-encode through GBK->latin1 so ftplib transmits
                            # the raw GBK bytes a GBK server expects.
                            path_encoded = current_path.encode('gbk').decode('latin1')
                            self.ftp.cwd('/' + path_encoded)
                            sync_logger.info(f"成功使用GBK编码访问目录: {current_path}")
                        except Exception as e:
                            sync_logger.error(f"GBK编码访问目录失败: {current_path}, 尝试直接访问")
                            try:
                                # Fall back to the path as-is.
                                self.ftp.cwd('/' + current_path)
                            except:
                                sync_logger.error(f"无法访问远程目录: {current_path}")
                                return
                    else:
                        self.ftp.cwd('/' + current_path)
                except Exception as e:
                    sync_logger.error(f"无法访问远程目录: {current_path}, 错误: {str(e)}")
                    return

            # Fetch the directory listing (one raw text line per entry).
            items = []
            try:
                self.ftp.dir(lambda x: items.append(x))
                sync_logger.info(f"远程目录 {current_path or '/'} 中找到 {len(items)} 个项目")

                # Log the raw listing for diagnostics.
                if items:
                    sync_logger.info(f"目录 {current_path or '/'} 内容: {items}")
            except Exception as e:
                sync_logger.error(f"获取远程目录列表失败: {current_path}, 错误: {str(e)}")
                return

            # Detect a Windows/IIS-style listing by its "<DIR>" marker.
            is_windows_style = any('<DIR>' in item for item in items if item)
            sync_logger.info(f"检测到{'Windows' if is_windows_style else 'Unix'}风格的FTP列表")

            if is_windows_style:
                # Parse Windows/IIS-style listing lines.
                for item in items:
                    try:
                        # Skip blank lines.
                        if not item.strip():
                            continue

                        # Directory entries carry a "<DIR>" marker.
                        is_dir = '<DIR>' in item

                        # Extract the entry name and size.
                        if is_dir:
                            # e.g.: 06-03-25  02:52PM       <DIR>          2
                            parts = item.split('<DIR>', 1)
                            if len(parts) != 2:
                                continue

                            name = parts[1].strip()
                            size = 0
                        else:
                            # e.g.: 06-03-25  02:54PM                   21 1.txt
                            try:
                                # Fixed-width prefix: date and time occupy the
                                # first 17 columns of an IIS listing line.
                                date_time = item[:17].strip()  # date/time portion
                                rest = item[17:].strip()  # size + name portion

                                # Split size from file name.
                                size_and_name = rest.split(' ', 1)
                                if len(size_and_name) != 2:
                                    # No space: treat the whole remainder as the name.
                                    name = rest
                                    size = 0
                                else:
                                    try:
                                        size = int(size_and_name[0])
                                        name = size_and_name[1].strip()
                                    except ValueError:
                                        # First token is not numeric; re-split on
                                        # arbitrary whitespace and retry.
                                        parts = rest.strip().split()
                                        if len(parts) >= 2:
                                            try:
                                                size = int(parts[0])
                                                name = ' '.join(parts[1:])
                                            except ValueError:
                                                name = rest
                                                size = 0
                                        else:
                                            name = rest
                                            size = 0
                            except Exception as parse_e:
                                sync_logger.error(f"解析Windows风格文件列表项失败: {item}, 错误: {str(parse_e)}")
                                continue

                        # Ignore the . and .. pseudo-entries.
                        if name in ('.', '..'):
                            continue

                        # Build the path relative to the sync root.
                        full_path = f"{current_path}/{name}".lstrip('/')

                        if is_dir:
                            # Record the directory itself...
                            result[full_path] = {'type': 'dir', 'size': 0}
                            sync_logger.debug(f"发现远程目录: {full_path}")
                            # ...then recurse into it.
                            self._list_remote_files(full_path, result)
                        else:
                            # Record the file with its parsed size.
                            result[full_path] = {'type': 'file', 'size': size}
                            sync_logger.info(f"发现远程文件: {full_path}, 大小: {size}")
                    except Exception as e:
                        sync_logger.error(f"处理Windows风格远程项目失败: {item}, 错误: {str(e)}")
                        continue
            else:
                # Parse Unix-style ("ls -l") listing lines.
                for item in items:
                    try:
                        # Split into at most 9 fields; the 9th is the name
                        # (which may itself contain spaces).
                        parts = item.split(None, 8)
                        if len(parts) < 9:
                            continue
                        name = parts[8]
                        is_dir = item.startswith('d')
                        try:
                            size = int(parts[4])
                        except:
                            size = 0

                        # Ignore the . and .. pseudo-entries.
                        if name in ('.', '..'):
                            continue

                        # Build the path relative to the sync root.
                        full_path = f"{current_path}/{name}".lstrip('/')

                        if is_dir:
                            # Record the directory itself...
                            result[full_path] = {'type': 'dir', 'size': 0}
                            sync_logger.debug(f"发现远程目录: {full_path}")
                            # ...then recurse into it.
                            self._list_remote_files(full_path, result)
                        else:
                            # Record the file with its parsed size.
                            result[full_path] = {'type': 'file', 'size': size}
                            sync_logger.debug(f"发现远程文件: {full_path}, 大小: {size}")

                    except Exception as e:
                        sync_logger.error(f"处理Unix风格远程项目失败: {item}, 错误: {str(e)}")
                        continue

            # Fallback: when DIR produced nothing usable, retry with NLST.
            # NOTE(review): for recursive calls, result already contains this
            # directory's own entry (added by the parent before recursing), so
            # the any(...) term is always True and the fallback effectively
            # triggers only when `items` is empty — confirm this is intended.
            if not items or not any(path.startswith(current_path) if current_path else True for path in result.keys()):
                try:
                    nlst_items = self.ftp.nlst()
                    sync_logger.info(f"使用NLST命令获取到 {len(nlst_items)} 个项目: {nlst_items}")

                    # NLST returns bare names with no type/size metadata.
                    for name in nlst_items:
                        if name in ['.', '..']:
                            continue

                        # Build the path relative to the sync root.
                        full_path = f"{current_path}/{name}".lstrip('/')

                        # Probe whether the entry is a directory.
                        is_dir = False
                        size = 0
                        try:
                            # SIZE succeeding (non-None) implies a regular file.
                            current_dir = self.ftp.pwd()
                            size = self.ftp.size(name)

                            # SIZE returned nothing — probably a directory.
                            if size is None:
                                try:
                                    self.ftp.cwd(name)  # try to enter it
                                    is_dir = True
                                    self.ftp.cwd(current_dir)  # go back
                                except:
                                    # Not a directory; a file whose size is unavailable.
                                    pass
                        except:
                            # SIZE (or PWD) failed; probe by CWD instead.
                            # NOTE(review): if pwd() above raised, current_dir is
                            # unbound here and cwd(current_dir) raises NameError,
                            # which the bare except swallows while leaving us in
                            # the wrong directory — confirm and fix upstream.
                            try:
                                self.ftp.cwd(name)
                                is_dir = True
                                self.ftp.cwd(current_dir)
                            except:
                                # Not a directory.
                                pass

                        if is_dir:
                            # Record the directory itself...
                            result[full_path] = {'type': 'dir', 'size': 0}
                            sync_logger.debug(f"使用NLST发现远程目录: {full_path}")
                            # ...then recurse into it.
                            self._list_remote_files(full_path, result)
                        else:
                            # Record the file (size may be unknown -> 0).
                            result[full_path] = {'type': 'file', 'size': size or 0}
                            sync_logger.debug(f"使用NLST发现远程文件: {full_path}, 大小: {size or '未知'}")

                except Exception as e:
                    sync_logger.error(f"使用NLST命令失败: {str(e)}")

            # Restore the directory we started from.
            try:
                self.ftp.cwd(original_dir)
            except:
                # Original directory gone? Fall back to the root.
                try:
                    self.ftp.cwd('/')
                except:
                    sync_logger.error("无法返回原始目录或根目录")

        except Exception as e:
            sync_logger.error(f"列出远程文件失败: {current_path}, 错误: {str(e)}")
            raise

    def upload_file(self, file_path, operation='modify'):
        """Upload a local file to the FTP server, retrying on failure.

        The remote path may contain non-ASCII (e.g. Chinese) characters, so
        three STOR spellings are attempted in order: the raw path, the path
        re-encoded via UTF-8, then via GBK.  After a successful transfer the
        remote SIZE is compared with the local size to verify the upload.

        Args:
            file_path: path relative to the sync root; backslashes are
                normalized to forward slashes.
            operation: label recorded in the sync history (default 'modify').

        Returns:
            True when the file was uploaded and verified; False when the local
            file is missing or still below the write-completion size threshold.

        Raises:
            Exception: when all retry attempts are exhausted.
        """
        try:
            # Normalize to forward slashes so the remote path is consistent.
            file_path = file_path.replace('\\', '/')
            local_full_path = os.path.join(self.sync_info['local_path'], file_path)

            if not os.path.exists(local_full_path):
                sync_logger.error(f"本地文件不存在: {file_path}")
                return False

            # A near-empty file is likely still being written by the producer;
            # wait briefly and re-check before giving up on this event.
            local_size = os.path.getsize(local_full_path)
            if local_size < self._min_file_size_threshold:
                sync_logger.warning(f"文件大小小于阈值({self._min_file_size_threshold}字节)，等待写入完成: {file_path}")
                time.sleep(1.0)
                local_size = os.path.getsize(local_full_path)
                if local_size < self._min_file_size_threshold:
                    sync_logger.warning(f"文件仍然小于阈值，跳过上传: {file_path}")
                    return False

            # Make sure the remote parent directory exists before STOR.
            remote_dir = os.path.dirname(file_path)
            if remote_dir:
                self.ensure_remote_dir(remote_dir)

            upload_attempts = 0
            max_attempts = 3
            last_error = None

            while upload_attempts < max_attempts:
                try:
                    with open(local_full_path, 'rb') as f:
                        success = False

                        # 1. Try the path as-is.
                        try:
                            self.ftp.storbinary(f'STOR {file_path}', f, blocksize=8192)
                            success = True
                            sync_logger.info(f"直接上传文件成功: {file_path}")
                        except Exception as direct_e:
                            # BUG FIX: always rewind before the next encoding
                            # attempt — the failed STOR may have consumed part of
                            # the file.  Previously the rewind was skipped on the
                            # last retry, silently uploading truncated data.
                            f.seek(0)

                            # 2. Try the path re-encoded via UTF-8.
                            try:
                                encoded_path = file_path.encode('utf-8').decode('latin1')
                                self.ftp.storbinary(f'STOR {encoded_path}', f, blocksize=8192)
                                success = True
                                sync_logger.info(f"使用UTF-8编码上传文件成功: {file_path}")
                            except Exception as utf8_e:
                                f.seek(0)

                                # 3. Try the path re-encoded via GBK.
                                try:
                                    encoded_path = file_path.encode('gbk').decode('latin1')
                                    self.ftp.storbinary(f'STOR {encoded_path}', f, blocksize=8192)
                                    success = True
                                    sync_logger.info(f"使用GBK编码上传文件成功: {file_path}")
                                except Exception as gbk_e:
                                    # BUG FIX: raise unconditionally so the outer
                                    # retry loop counts this attempt.  Previously
                                    # a non-final all-encodings failure fell
                                    # through without incrementing
                                    # upload_attempts, looping forever.
                                    sync_logger.error(f"所有编码方式上传都失败: {file_path}")
                                    sync_logger.error(f"直接上传错误: {str(direct_e)}")
                                    sync_logger.error(f"UTF-8编码错误: {str(utf8_e)}")
                                    sync_logger.error(f"GBK编码错误: {str(gbk_e)}")
                                    raise Exception("所有上传方式都失败")

                        # Verify the upload by comparing remote and local sizes.
                        if success:
                            try:
                                remote_size = self.ftp.size(file_path)
                                if remote_size == local_size:
                                    sync_logger.info(f"文件上传成功: {file_path} (大小: {local_size} 字节)")
                                    # Record the completed transfer in sync history.
                                    self.record_sync_history(os.path.basename(file_path), 'local_to_server', operation)
                                    return True
                                else:
                                    raise Exception(f"文件大小不匹配: 本地={local_size}, 远程={remote_size}")
                            except ftplib.error_perm as e:
                                raise Exception(f"无法验证上传: {str(e)}")

                except Exception as e:
                    last_error = e
                    upload_attempts += 1
                    if upload_attempts < max_attempts:
                        sync_logger.warning(f"上传失败，尝试重试 ({upload_attempts}/{max_attempts}): {str(e)}")
                        # Drop the cached connection; the `ftp` property reconnects.
                        self._ftp = None
                        time.sleep(2)  # back off before retrying
                    else:
                        sync_logger.error(f"上传失败，已达到最大重试次数: {str(e)}")
                        raise last_error

            return False

        except Exception as e:
            sync_logger.error(f"上传文件失败: {file_path}, 错误: {str(e)}")
            raise

    def list_remote_files(self):
        """Return the full recursive listing of the remote directory tree.

        The result maps each remote path to its metadata dict as produced by
        the recursive walker; any failure is logged and yields an empty dict
        instead of propagating the exception.
        """
        listing = {}
        try:
            self._list_remote_files('', listing)
        except Exception as e:
            sync_logger.error(f"列出远程文件失败: {str(e)}")
            return {}
        return listing