import json
import os
import shutil
import threading
import time
import uuid
from datetime import datetime, timedelta
from pathlib import Path

from flask import Blueprint, request, jsonify
from werkzeug.utils import secure_filename

from database import UploadDAO
from file_manager import FileManager
from utils.logger import get_logger
from config import Config

logger = get_logger(__name__)


class ChunkedUploadSession:
    """Manage chunked (resumable) upload sessions.

    Each session owns a directory ``Config.TEMP_FOLDER/chunks/<upload_id>``
    containing one ``chunk_<i>`` file per received chunk plus a
    ``session.json`` snapshot used to recover in-flight uploads after a
    service restart.  All access to the in-memory ``sessions`` dict is
    serialized through ``self.lock``; a daemon thread expires sessions idle
    for more than 2 hours.
    """

    def __init__(self):
        # upload_id -> session-info dict; every read/write goes through self.lock
        self.sessions = {}
        self.lock = threading.Lock()
        self.temp_dir = Path(Config.TEMP_FOLDER) / 'chunks'
        self._ensure_temp_dir()

        # Background janitor: loops forever, dies with the process (daemon=True).
        self.cleanup_thread = threading.Thread(target=self._cleanup_expired_sessions, daemon=True)
        self.cleanup_thread.start()

    def _ensure_temp_dir(self):
        """Create the chunk temp root if it does not exist yet.

        Raises:
            OSError: if the directory cannot be created.
        """
        try:
            self.temp_dir.mkdir(parents=True, exist_ok=True)
            logger.debug(f"确保临时目录存在: {self.temp_dir}")
        except Exception as e:
            logger.error(f"创建临时目录失败: {self.temp_dir}, 错误: {e}")
            raise

    def create_session(self, filename, file_size, file_type, chunk_size):
        """Create a new upload session and its on-disk directory.

        Args:
            filename: client-supplied file name (sanitized before storing).
            file_size: total upload size in bytes.
            file_type: logical category used later to pick the library subfolder.
            chunk_size: size of each chunk in bytes; must be positive.

        Returns:
            The session-info dict (also registered in ``self.sessions``).

        Raises:
            ValueError: if chunk_size is invalid or the session directory
                cannot be created.
        """
        # Guard the ceiling division below against a client-sent 0/negative size.
        if chunk_size <= 0:
            raise ValueError("chunk_size必须大于0")

        # Re-create the temp root in case it was removed externally.
        self._ensure_temp_dir()

        upload_id = str(uuid.uuid4())
        session_dir = self.temp_dir / upload_id

        try:
            session_dir.mkdir(parents=True, exist_ok=True)
        except Exception as e:
            logger.error(f"创建会话目录失败: {session_dir}, 错误: {e}")
            raise ValueError(f"无法创建上传会话目录: {e}")

        session_info = {
            'upload_id': upload_id,
            # NOTE(review): secure_filename may return '' for fully non-ASCII
            # names, which would drop the extension in complete_upload —
            # confirm upstream validation guarantees a usable name.
            'filename': secure_filename(filename),
            'file_size': file_size,
            'file_type': file_type,
            'chunk_size': chunk_size,
            # Ceiling division: the last chunk may be smaller than chunk_size.
            'total_chunks': (file_size + chunk_size - 1) // chunk_size,
            'uploaded_chunks': set(),
            'session_dir': str(session_dir),
            'created_time': datetime.now(),
            'last_activity': datetime.now()
        }

        with self.lock:
            self.sessions[upload_id] = session_info

        self._save_session_info(upload_id, session_info)
        # Fixed: the original logged a literal "(unknown)" instead of the name.
        logger.info(f"创建上传会话: {upload_id}, 文件: {filename}, 大小: {file_size}")
        return session_info

    def upload_chunk(self, upload_id, chunk_index, chunk_data):
        """Store the bytes of one chunk for an active session.

        Args:
            upload_id: session identifier from create_session.
            chunk_index: 0-based chunk position.
            chunk_data: raw bytes of the chunk.

        Returns:
            The live session dict (so callers can report progress).

        Raises:
            ValueError: if the session is unknown or the chunk cannot be saved.
        """
        with self.lock:
            if upload_id not in self.sessions:
                raise ValueError("上传会话不存在或已过期")

            session = self.sessions[upload_id]
            session['last_activity'] = datetime.now()

        # The directory may have been wiped by an external cleanup; recreate
        # it so an in-flight upload can continue instead of failing.
        session_dir = Path(session['session_dir'])
        if not session_dir.exists():
            try:
                session_dir.mkdir(parents=True, exist_ok=True)
                logger.warning(f"重新创建会话目录: {session_dir}")
            except Exception as e:
                logger.error(f"重新创建会话目录失败: {session_dir}, 错误: {e}")
                raise ValueError(f"会话目录不可用: {e}")

        chunk_file = session_dir / f"chunk_{chunk_index}"

        try:
            with open(chunk_file, 'wb') as f:
                f.write(chunk_data)
        except Exception as e:
            logger.error(f"保存分片失败: {chunk_file}, 错误: {e}")
            raise ValueError(f"保存分片失败: {e}")

        # Record the chunk and persist the snapshot under the lock so
        # concurrent writers cannot serialize a stale uploaded_chunks list.
        with self.lock:
            session['uploaded_chunks'].add(chunk_index)
            self._save_session_info(upload_id, session)

        logger.debug(f"上传分片 {chunk_index}/{session['total_chunks']} for {upload_id}")
        return session

    def complete_upload(self, upload_id, file_manager, upload_dao):
        """Assemble all chunks into the final file and register it.

        The session is always cleaned up afterwards, on success or failure.

        Args:
            upload_id: session identifier.
            file_manager: used for best-effort preview generation.
            upload_dao: persistence layer for the upload record.

        Returns:
            Result dict with file_id, metadata and an optional preview URL.

        Raises:
            ValueError: on missing chunks, size mismatch, or persistence errors.
        """
        with self.lock:
            if upload_id not in self.sessions:
                raise ValueError("上传会话不存在或已过期")

            session = self.sessions[upload_id].copy()
            # dict.copy() is shallow: the set would still be shared with the
            # live session, so a concurrent upload_chunk could mutate it while
            # we validate below.  Snapshot it explicitly.
            session['uploaded_chunks'] = set(session['uploaded_chunks'])

        try:
            # All chunks accounted for?
            if len(session['uploaded_chunks']) != session['total_chunks']:
                missing_chunks = set(range(session['total_chunks'])) - session['uploaded_chunks']
                raise ValueError(f"缺少分片: {missing_chunks}")

            session_dir = Path(session['session_dir'])
            if not session_dir.exists():
                raise ValueError("会话目录不存在，无法完成上传")

            # Verify every chunk file actually survived on disk.
            missing_chunk_files = [
                i for i in range(session['total_chunks'])
                if not (session_dir / f"chunk_{i}").exists()
            ]
            if missing_chunk_files:
                raise ValueError(f"分片文件缺失: {missing_chunk_files}")

            # Final path: <LIBRARY_FOLDER>/<file_type>/<uuid><original ext>.
            file_id = str(uuid.uuid4())
            file_ext = os.path.splitext(session['filename'])[1]
            final_filename = f"{file_id}{file_ext}"

            save_dir = os.path.join(Config.LIBRARY_FOLDER, session['file_type'])
            os.makedirs(save_dir, exist_ok=True)
            final_path = os.path.join(save_dir, final_filename)

            logger.info(f"开始合并分片: {upload_id}, 目标文件: {final_path}")

            # Concatenate chunks in index order into the final file.
            try:
                with open(final_path, 'wb') as final_file:
                    for i in range(session['total_chunks']):
                        chunk_file = session_dir / f"chunk_{i}"
                        with open(chunk_file, 'rb') as chunk:
                            final_file.write(chunk.read())
            except Exception as e:
                # Remove the partially written file before reporting failure.
                if os.path.exists(final_path):
                    try:
                        os.remove(final_path)
                    except OSError:
                        pass
                raise ValueError(f"合并分片失败: {e}")

            # Guard against truncated or duplicated chunks.
            actual_size = os.path.getsize(final_path)
            if actual_size != session['file_size']:
                os.remove(final_path)
                raise ValueError(f"文件大小不匹配: 期望 {session['file_size']}, 实际 {actual_size}")

            # Preview generation is best-effort; the upload succeeds without one.
            try:
                preview_path = file_manager._generate_preview(file_id, final_path, file_ext)
            except Exception as e:
                logger.warning(f"生成预览失败: {e}")
                preview_path = None

            success = upload_dao.save_upload(
                file_id,
                session['filename'],
                session['file_type'],
                final_path,
                preview_path,
                session['file_size']
            )

            if not success:
                # Roll back the files so disk and DB stay consistent.
                if os.path.exists(final_path):
                    os.remove(final_path)
                if preview_path and os.path.exists(preview_path):
                    os.remove(preview_path)
                raise ValueError("保存到数据库失败")

            self._cleanup_session(upload_id)

            logger.info(f"文件上传完成: {upload_id} -> {file_id}")

            return {
                'success': True,
                'file_id': file_id,
                'original_name': session['filename'],
                'file_type': session['file_type'],
                'file_size': session['file_size'],
                'preview_url': f'/api/preview-file/{file_id}' if preview_path else None
            }

        except Exception as e:
            logger.error(f"完成上传失败: {upload_id}, 错误: {e}")
            self._cleanup_session(upload_id)
            raise

    def cancel_upload(self, upload_id):
        """Abort an upload and discard its chunks (no-op if unknown)."""
        logger.info(f"取消上传: {upload_id}")
        self._cleanup_session(upload_id)

    def get_upload_status(self, upload_id):
        """Return a progress summary for a session, or None if unknown."""
        with self.lock:
            if upload_id not in self.sessions:
                return None

            session = self.sessions[upload_id]
            total = session['total_chunks']
            return {
                'upload_id': upload_id,
                'filename': session['filename'],
                'uploaded_chunks': list(session['uploaded_chunks']),
                'total_chunks': total,
                # Guard total == 0 (zero-byte file) against ZeroDivisionError.
                'progress': len(session['uploaded_chunks']) / total * 100 if total else 100.0
            }

    def _save_session_info(self, upload_id, session_info):
        """Persist the session snapshot to session.json (best-effort)."""
        try:
            session_dir = Path(session_info['session_dir'])
            if not session_dir.exists():
                session_dir.mkdir(parents=True, exist_ok=True)

            session_file = session_dir / 'session.json'
            # Serialize a copy: sets and datetimes are not JSON-encodable as-is.
            session_data = session_info.copy()
            session_data['uploaded_chunks'] = list(session_data['uploaded_chunks'])
            session_data['created_time'] = session_data['created_time'].isoformat()
            session_data['last_activity'] = session_data['last_activity'].isoformat()

            with open(session_file, 'w') as f:
                json.dump(session_data, f)
        except Exception as e:
            logger.warning(f"保存会话信息失败: {e}")

    def _cleanup_session(self, upload_id):
        """Drop a session from memory and delete its chunk directory."""
        with self.lock:
            session = self.sessions.pop(upload_id, None)

        if session is None:
            return

        # rmtree can be slow disk I/O; run it outside the lock so other
        # sessions' chunk uploads are not blocked while we delete.
        session_dir = Path(session['session_dir'])
        if session_dir.exists():
            try:
                shutil.rmtree(session_dir, ignore_errors=True)
                logger.debug(f"清理会话目录: {session_dir}")
            except Exception as e:
                logger.warning(f"清理会话目录失败: {session_dir}, 错误: {e}")

    def _cleanup_expired_sessions(self):
        """Daemon loop: every 5 minutes, expire sessions idle for 2+ hours."""
        while True:
            try:
                time.sleep(300)  # sweep interval: 5 minutes

                cutoff_time = datetime.now() - timedelta(hours=2)

                with self.lock:
                    expired_sessions = [
                        upload_id
                        for upload_id, session in self.sessions.items()
                        if session['last_activity'] < cutoff_time
                    ]

                # Clean up outside the lock; _cleanup_session re-acquires it.
                for upload_id in expired_sessions:
                    logger.info(f"清理过期会话: {upload_id}")
                    self._cleanup_session(upload_id)

                # Also sweep directories whose sessions vanished (e.g. crash).
                self._cleanup_orphaned_directories()

            except Exception as e:
                logger.error(f"清理过期会话失败: {e}")

    def _cleanup_orphaned_directories(self):
        """Delete chunk directories that no longer belong to an active session."""
        try:
            if not self.temp_dir.exists():
                return

            with self.lock:
                active_upload_ids = set(self.sessions.keys())

            for item in self.temp_dir.iterdir():
                if item.is_dir() and item.name not in active_upload_ids:
                    try:
                        # Only remove directories untouched for 2+ hours, so a
                        # session created between the snapshot above and this
                        # check is never deleted.
                        dir_mtime = os.path.getmtime(item)
                        if time.time() - dir_mtime > 7200:  # 2 hours
                            shutil.rmtree(item, ignore_errors=True)
                            logger.debug(f"清理孤立目录: {item}")
                    except Exception as e:
                        logger.warning(f"清理孤立目录失败: {item}, 错误: {e}")
        except Exception as e:
            logger.warning(f"清理孤立目录失败: {e}")

    def recover_sessions_from_disk(self):
        """Rebuild the in-memory session table from session.json files.

        Called once at service startup.  Expired sessions are skipped; chunk
        indices whose files are missing on disk are dropped so the client
        re-uploads them.
        """
        try:
            if not self.temp_dir.exists():
                return

            logger.info("从磁盘恢复上传会话...")
            recovered_count = 0

            for session_dir in self.temp_dir.iterdir():
                if not session_dir.is_dir():
                    continue

                session_file = session_dir / 'session.json'
                if not session_file.exists():
                    continue

                try:
                    with open(session_file, 'r') as f:
                        session_data = json.load(f)

                    # Restore the types flattened by _save_session_info.
                    session_data['created_time'] = datetime.fromisoformat(session_data['created_time'])
                    session_data['last_activity'] = datetime.fromisoformat(session_data['last_activity'])
                    session_data['uploaded_chunks'] = set(session_data['uploaded_chunks'])

                    # Skip sessions past the 2-hour expiry window.
                    cutoff_time = datetime.now() - timedelta(hours=2)
                    if session_data['last_activity'] < cutoff_time:
                        logger.debug(f"跳过过期会话: {session_dir.name}")
                        continue

                    # Keep only chunk indices whose files still exist.
                    valid_chunks = set()
                    for chunk_index in session_data['uploaded_chunks']:
                        chunk_file = session_dir / f"chunk_{chunk_index}"
                        if chunk_file.exists():
                            valid_chunks.add(chunk_index)

                    session_data['uploaded_chunks'] = valid_chunks

                    with self.lock:
                        self.sessions[session_dir.name] = session_data

                    recovered_count += 1
                    logger.debug(f"恢复会话: {session_dir.name}")

                except Exception as e:
                    logger.warning(f"恢复会话失败: {session_dir.name}, 错误: {e}")

            if recovered_count > 0:
                logger.info(f"成功恢复 {recovered_count} 个上传会话")

        except Exception as e:
            logger.error(f"从磁盘恢复会话失败: {e}")


# Global chunked-upload session manager (module-level singleton).
# NOTE(review): instantiated at import time, so importing this module creates
# the temp directory and starts the cleanup daemon thread as side effects.
chunked_upload_manager = ChunkedUploadSession()


def create_upload_routes(upload_dao: UploadDAO, file_manager: FileManager):
    """Build and return the upload Blueprint (plain + chunked endpoints).

    Args:
        upload_dao: persistence layer for upload records.
        file_manager: handles file storage, previews, and type validation.

    Returns:
        A Flask Blueprint exposing /upload and the /upload/* chunked routes.
    """

    upload_bp = Blueprint('upload', __name__)

    # Best-effort: restore in-flight chunked uploads after a restart.
    try:
        chunked_upload_manager.recover_sessions_from_disk()
    except Exception as e:
        logger.warning(f"恢复上传会话失败: {e}")

    @upload_bp.route('/upload', methods=['POST'])
    def upload_file():
        """Single-request upload endpoint (suitable for small files)."""
        try:
            if 'file' not in request.files:
                return jsonify({'success': False, 'message': '没有文件'}), 400

            file = request.files['file']
            if file.filename == '':
                return jsonify({'success': False, 'message': '没有选择文件'}), 400

            file_type = request.form.get('type', 'unknown')

            # Store the file; error is a user-facing message on failure.
            file_info, error = file_manager.save_uploaded_file(file, file_type)

            if error:
                return jsonify({'success': False, 'message': error}), 400

            success = upload_dao.save_upload(
                file_info['file_id'],
                file_info['original_name'],
                file_info['file_type'],
                file_info['file_path'],
                file_info['preview_path'],
                file_info['file_size']
            )

            if not success:
                # Roll back stored files so disk and DB stay consistent.
                file_manager.delete_file(file_info['file_path'])
                if file_info['preview_path']:
                    file_manager.delete_file(file_info['preview_path'])
                return jsonify({'success': False, 'message': '保存到数据库失败'}), 500

            result = {
                'success': True,
                'file_id': file_info['file_id'],
                'original_name': file_info['original_name'],
                'file_type': file_info['file_type'],
                'file_size': file_info['file_size'],
                'preview_url': f'/api/preview-file/{file_info["file_id"]}' if file_info['preview_path'] else None
            }

            return jsonify(result)

        except Exception as e:
            logger.error(f"文件上传失败: {e}")
            return jsonify({'success': False, 'message': f'上传失败: {str(e)}'}), 500

    # ================ chunked-upload routes ================

    @upload_bp.route('/upload/init', methods=['POST'])
    def init_chunked_upload():
        """Start a chunked upload session and return its uploadId."""
        try:
            # silent=True: missing/malformed JSON yields None instead of an
            # exception, so bad requests get a 400 below rather than a 500.
            data = request.get_json(silent=True) or {}

            filename = data.get('filename')
            file_size = data.get('fileSize')
            file_type = data.get('fileType', 'unknown')
            chunk_size = data.get('chunkSize', 5 * 1024 * 1024)  # default 5 MB

            # fileSize must be a positive int: a string here would make the
            # size comparison below raise and turn a client error into a 500.
            if not filename or not isinstance(file_size, int) or file_size <= 0:
                return jsonify({'success': False, 'message': '缺少必要参数'}), 400

            # Hard cap: 10 GB
            max_size = 10 * 1024 * 1024 * 1024
            if file_size > max_size:
                return jsonify({'success': False, 'message': '文件大小超过限制'}), 400

            # Extension must be allowed for the declared file type.
            file_ext = os.path.splitext(filename)[1].lower()
            if not file_manager._is_allowed_file(file_ext, file_type):
                return jsonify({'success': False, 'message': '文件类型不支持'}), 400

            session_info = chunked_upload_manager.create_session(
                filename, file_size, file_type, chunk_size
            )

            return jsonify({
                'success': True,
                'uploadId': session_info['upload_id'],
                'totalChunks': session_info['total_chunks'],
                'uploadedChunks': list(session_info['uploaded_chunks'])
            })

        except Exception as e:
            logger.error(f"初始化分片上传失败: {e}")
            return jsonify({'success': False, 'message': str(e)}), 500

    @upload_bp.route('/upload/chunk', methods=['POST'])
    def upload_chunk():
        """Receive one chunk for an active upload session."""
        try:
            upload_id = request.form.get('uploadId')
            chunk_index_raw = request.form.get('chunkIndex')

            # Validate before int(): a missing chunkIndex was previously a
            # TypeError that surfaced as an opaque 500.
            if not upload_id or chunk_index_raw is None:
                return jsonify({'success': False, 'message': '缺少必要参数'}), 400

            try:
                chunk_index = int(chunk_index_raw)
            except ValueError:
                return jsonify({'success': False, 'message': '缺少必要参数'}), 400

            if 'chunk' not in request.files:
                return jsonify({'success': False, 'message': '没有分片数据'}), 400

            chunk_file = request.files['chunk']
            chunk_data = chunk_file.read()

            session = chunked_upload_manager.upload_chunk(upload_id, chunk_index, chunk_data)

            return jsonify({
                'success': True,
                'chunkIndex': chunk_index,
                'uploadedChunks': len(session['uploaded_chunks']),
                'totalChunks': session['total_chunks']
            })

        except ValueError as e:
            # Session/IO validation failures from the manager are client errors.
            return jsonify({'success': False, 'message': str(e)}), 400
        except Exception as e:
            logger.error(f"上传分片失败: {e}")
            return jsonify({'success': False, 'message': str(e)}), 500

    @upload_bp.route('/upload/complete', methods=['POST'])
    def complete_chunked_upload():
        """Merge all chunks and finalize the upload."""
        try:
            # silent=True turns malformed JSON into a 400 instead of a 500.
            data = request.get_json(silent=True) or {}
            upload_id = data.get('uploadId')

            if not upload_id:
                return jsonify({'success': False, 'message': '缺少uploadId'}), 400

            result = chunked_upload_manager.complete_upload(
                upload_id, file_manager, upload_dao
            )

            return jsonify(result)

        except ValueError as e:
            return jsonify({'success': False, 'message': str(e)}), 400
        except Exception as e:
            logger.error(f"完成分片上传失败: {e}")
            return jsonify({'success': False, 'message': str(e)}), 500

    @upload_bp.route('/upload/cancel', methods=['POST'])
    def cancel_chunked_upload():
        """Abort an in-progress chunked upload and discard its chunks."""
        try:
            # silent=True turns malformed JSON into a 400 instead of a 500.
            data = request.get_json(silent=True) or {}
            upload_id = data.get('uploadId')

            if not upload_id:
                return jsonify({'success': False, 'message': '缺少uploadId'}), 400

            chunked_upload_manager.cancel_upload(upload_id)

            return jsonify({'success': True, 'message': '上传已取消'})

        except Exception as e:
            logger.error(f"取消分片上传失败: {e}")
            return jsonify({'success': False, 'message': str(e)}), 500

    @upload_bp.route('/upload/status/<upload_id>', methods=['GET'])
    def get_chunked_upload_status(upload_id):
        """Report progress for a chunked upload session."""
        try:
            status = chunked_upload_manager.get_upload_status(upload_id)

            if status is None:
                return jsonify({'success': False, 'message': '上传会话不存在'}), 404

            return jsonify({'success': True, 'status': status})

        except Exception as e:
            logger.error(f"获取上传状态失败: {e}")
            return jsonify({'success': False, 'message': str(e)}), 500

    return upload_bp