import os
import threading
import requests
import time
import math
import json
from pathlib import Path
from tqdm import tqdm
from concurrent.futures import ThreadPoolExecutor, as_completed
from commands import BaiduPanCommands
from exceptions import NetworkError, BaiduApiError

class ChunkDownloader:
    """Downloads one byte range ("chunk") of a file via an HTTP Range request.

    Each chunk writes to its own temp file (``<file_path>.part<chunk_id>``),
    which lets an interrupted transfer resume from the bytes already on disk.
    """

    def __init__(self, url, file_path, start_byte, end_byte, chunk_id, progress_callback=None):
        """
        Initialize the chunk downloader.

        Args:
            url: download URL.
            file_path: final local file path; the temp file name derives from it.
            start_byte: inclusive first byte of this chunk.
            end_byte: inclusive last byte of this chunk.
            chunk_id: index of this chunk within the file.
            progress_callback: optional ``callback(chunk_id, bytes_received)``
                invoked as data arrives.
        """
        self.url = url
        self.file_path = file_path
        self.start_byte = start_byte
        self.end_byte = end_byte
        self.chunk_id = chunk_id
        self.progress_callback = progress_callback
        self.chunk_size = end_byte - start_byte + 1  # Range bounds are inclusive
        self.downloaded = 0
        self.status = "pending"  # pending, downloading, completed, failed
        self.lock = threading.Lock()

        # Per-chunk temp file used for resume support.
        self.temp_file = f"{file_path}.part{chunk_id}"

    def download(self, timeout=30):
        """Download this chunk, resuming from an existing temp file if possible.

        Args:
            timeout: connect/read timeout in seconds passed to ``requests.get``
                (prevents a dead connection from hanging the worker forever).

        Returns:
            True on success.

        Raises:
            NetworkError: if the HTTP transfer fails.
        """
        self.status = "downloading"
        headers = {"Range": f"bytes={self.start_byte}-{self.end_byte}"}

        try:
            # Resume: if a temp file already holds part of this chunk, only
            # request the remaining bytes.
            if os.path.exists(self.temp_file):
                downloaded_size = os.path.getsize(self.temp_file)
                if downloaded_size < self.chunk_size:
                    headers["Range"] = f"bytes={self.start_byte + downloaded_size}-{self.end_byte}"
                    self.downloaded = downloaded_size  # keep counter in sync with disk
                elif downloaded_size == self.chunk_size:
                    # Chunk already fully downloaded.
                    self.status = "completed"
                    self.downloaded = self.chunk_size
                    if self.progress_callback:
                        self.progress_callback(self.chunk_id, 0)  # notify; 0 new bytes
                    return True
                else:
                    # Temp file is larger than the chunk: corrupt, start over.
                    os.remove(self.temp_file)
                    self.downloaded = 0

            # Append when resuming, otherwise create the file fresh.
            mode = "ab" if os.path.exists(self.temp_file) and self.downloaded > 0 else "wb"
            with open(self.temp_file, mode) as f:
                with requests.get(self.url, headers=headers, stream=True, timeout=timeout) as r:
                    r.raise_for_status()
                    for chunk in r.iter_content(chunk_size=8192):
                        if chunk:
                            f.write(chunk)
                            with self.lock:
                                self.downloaded += len(chunk)
                                if self.progress_callback:
                                    self.progress_callback(self.chunk_id, len(chunk))

            self.status = "completed"
            return True
        except Exception as e:
            self.status = "failed"
            # Chain the original exception so the root cause stays visible.
            raise NetworkError(f"分片下载失败: {e}") from e

    def download_old(self):
        """Legacy chunk download.

        Deprecated: superseded by ``download`` (which keeps ``downloaded``
        consistent with the temp file and supports a timeout). Kept only so
        existing callers do not break.
        """
        self.status = "downloading"
        headers = {"Range": f"bytes={self.start_byte}-{self.end_byte}"}

        try:
            # Resume from a partially downloaded temp file.
            if os.path.exists(self.temp_file):
                downloaded_size = os.path.getsize(self.temp_file)
                if downloaded_size < self.chunk_size:
                    headers["Range"] = f"bytes={self.start_byte + downloaded_size}-{self.end_byte}"
                    self.downloaded = downloaded_size
                elif downloaded_size == self.chunk_size:
                    # Chunk already complete.
                    self.status = "completed"
                    if self.progress_callback:
                        self.progress_callback(self.chunk_id, self.chunk_size)
                    return True
                else:
                    # Oversized temp file: restart this chunk.
                    os.remove(self.temp_file)

            mode = "ab" if os.path.exists(self.temp_file) else "wb"
            with open(self.temp_file, mode) as f:
                with requests.get(self.url, headers=headers, stream=True) as r:
                    r.raise_for_status()
                    for chunk in r.iter_content(chunk_size=8192):
                        if chunk:
                            f.write(chunk)
                            with self.lock:
                                self.downloaded += len(chunk)
                                if self.progress_callback:
                                    self.progress_callback(self.chunk_id, len(chunk))

            self.status = "completed"
            return True
        except Exception as e:
            self.status = "failed"
            raise NetworkError(f"分片下载失败: {e}") from e

    def get_progress(self):
        """Return ``(bytes_downloaded, chunk_size)`` for this chunk."""
        return self.downloaded, self.chunk_size

class DownloadManager:
    """Multi-threaded file downloader with resume (checkpoint) support."""

    def __init__(self, commands: "BaiduPanCommands", max_workers=4, chunk_size=4 * 1024 * 1024):
        """
        Initialize the download manager.

        Args:
            commands: BaiduPanCommands instance used for all API requests.
            max_workers: maximum number of concurrent download threads.
            chunk_size: size of one download chunk, in bytes.
        """
        self.commands = commands
        self.max_workers = max_workers
        self.chunk_size = chunk_size
        self.tasks = {}           # task_id -> task metadata dict
        self.progress_bars = {}   # task_id -> tqdm instance
        self.lock = threading.Lock()
        self.status_file = Path.home() / ".baidupan" / "download_status.json"
        # Throttle for checkpoint writes: persisting the JSON status on every
        # 8 KiB network chunk is needlessly expensive, so progress callbacks
        # save at most once per second.
        self._last_status_save = 0.0

        # Make sure the directory holding the status file exists.
        self.status_file.parent.mkdir(parents=True, exist_ok=True)

    def _get_download_url(self, remote_path):
        """Resolve a remote path to a ``(download_url, file_size)`` pair.

        Raises:
            BaiduApiError: if the file does not exist or no dlink is returned.
        """
        # Look up the file's fs_id and size first.
        file_info = self.commands._get_file_info(remote_path)
        if not file_info or "fs_id" not in file_info:
            raise BaiduApiError("文件不存在")

        fsid = file_info['fs_id']
        file_size = file_info['size']
        fsids_json = json.dumps([int(fsid)])  # the API requires integer fsids

        # Ask the multimedia endpoint for the download link (dlink).
        result = self.commands._make_request(
            "GET",
            "multimedia",
            params={
                "method": "filemetas",
                "fsids": fsids_json,
                "dlink": 1
            }
        )

        if 'list' not in result or not result['list']:
            raise BaiduApiError("无法获取下载链接")

        file_info = result['list'][0]
        if 'dlink' not in file_info:
            raise BaiduApiError("文件信息中缺少下载链接")

        download_url = file_info['dlink']

        # The dlink is only usable with the access_token appended.
        download_url = f"{download_url}&access_token={self.commands.access_token}"

        return download_url, file_size

    def _load_download_status(self, file_path):
        """Return the saved checkpoint for ``file_path``, or None if absent."""
        if not self.status_file.exists():
            return None

        try:
            with open(self.status_file, 'r') as f:
                status_data = json.load(f)
                return status_data.get(file_path)
        except (OSError, ValueError):
            # Unreadable or corrupt status file: treat as "no saved status".
            # (json.JSONDecodeError is a ValueError subclass.)
            return None

    def _save_download_status(self, file_path, status):
        """Merge ``status`` for ``file_path`` into the checkpoint file.

        Returns:
            True if the file was written, False otherwise (best effort).
        """
        status_data = {}
        if self.status_file.exists():
            try:
                with open(self.status_file, 'r') as f:
                    status_data = json.load(f)
            except (OSError, ValueError):
                # Corrupt existing file: overwrite it with a fresh mapping.
                pass

        status_data[file_path] = status

        try:
            with open(self.status_file, 'w') as f:
                json.dump(status_data, f, indent=2)
            return True
        except (OSError, TypeError, ValueError):
            return False

    def _remove_download_status(self, file_path):
        """Drop ``file_path``'s entry from the checkpoint file (best effort)."""
        if not self.status_file.exists():
            return True

        try:
            with open(self.status_file, 'r') as f:
                status_data = json.load(f)

            if file_path in status_data:
                del status_data[file_path]

            with open(self.status_file, 'w') as f:
                json.dump(status_data, f, indent=2)
            return True
        except (OSError, ValueError):
            return False

    def download_file(self, remote_path, local_path, progress_callback=None):
        """Download a remote file using multi-threaded chunks with resume.

        Args:
            remote_path: path of the file on the remote pan.
            local_path: local destination path.
            progress_callback: optional ``callback(bytes_done, file_size)``.

        Returns:
            True on success (or when an identical local copy already exists).

        Raises:
            NetworkError: if one or more chunks fail to download.
            Exception: if the merged file size does not match the remote size.
        """
        # Resolve the download URL and the expected file size.
        download_url, file_size = self._get_download_url(remote_path)

        # If a complete local copy already exists, skip the download.
        if os.path.exists(local_path):
            local_size = os.path.getsize(local_path)
            if local_size == file_size:
                print(f"✅ 文件已存在: {local_path}")
                return True
            else:
                print(f"⚠️  文件已存在但大小不匹配，将重新下载")
                os.remove(local_path)

        # Load any previously saved checkpoint.
        status = self._load_download_status(local_path)
        chunks = []

        # Split the file into fixed-size chunks.
        num_chunks = math.ceil(file_size / self.chunk_size)

        # Temp files with no matching checkpoint are stale; remove them.
        for i in range(num_chunks):
            temp_file = f"{local_path}.part{i}"
            if status is None and os.path.exists(temp_file):
                os.remove(temp_file)

        for i in range(num_chunks):
            start_byte = i * self.chunk_size
            end_byte = min(start_byte + self.chunk_size - 1, file_size - 1)

            # Seed per-chunk progress from the checkpoint, but always trust
            # the bytes actually on disk over the recorded number.
            downloaded = 0
            if status and i < len(status['chunks']):
                downloaded = status['chunks'][i]

                temp_file = f"{local_path}.part{i}"
                if os.path.exists(temp_file):
                    actual_size = os.path.getsize(temp_file)
                    if actual_size != downloaded:
                        downloaded = actual_size
                else:
                    # Temp file is gone: this chunk restarts from zero.
                    downloaded = 0

            chunk = ChunkDownloader(
                url=download_url,
                file_path=local_path,
                start_byte=start_byte,
                end_byte=end_byte,
                chunk_id=i,
                progress_callback=self._chunk_progress_callback
            )
            chunk.downloaded = downloaded
            chunks.append(chunk)

        # Persist a checkpoint that matches what is actually on disk.
        self._save_download_status(local_path, {
            'remote_path': remote_path,
            'file_size': file_size,
            'chunks': [c.downloaded for c in chunks]
        })

        # Register the task.
        task_id = os.path.basename(remote_path)
        self.tasks[task_id] = {
            'remote_path': remote_path,
            'local_path': local_path,
            'file_size': file_size,
            'chunks': chunks,
            'start_time': time.time(),
            'progress_callback': progress_callback
        }

        # Progress bar starts at the number of bytes already on disk.
        initial_downloaded = sum(c.downloaded for c in chunks)
        self.progress_bars[task_id] = tqdm(
            total=file_size,
            initial=initial_downloaded,
            unit='B',
            unit_scale=True,
            desc=f"下载 {os.path.basename(remote_path)}",
            position=0
        )

        try:
            # Fan the incomplete chunks out over the thread pool.
            with ThreadPoolExecutor(max_workers=self.max_workers) as executor:
                futures = {}
                for chunk in chunks:
                    if chunk.downloaded < chunk.chunk_size:
                        future = executor.submit(chunk.download)
                        futures[future] = chunk.chunk_id

                # Wait for all chunks and collect failures.
                failed = []
                for future in as_completed(futures):
                    chunk_id = futures[future]
                    try:
                        future.result()
                    except Exception as e:
                        failed.append(chunk_id)
                        print(f"❌❌ 分片 {chunk_id} 下载失败: {e}")

            if failed:
                # Do NOT merge partial data; keep temp files and the
                # checkpoint so the download can be resumed later.
                raise NetworkError(f"分片下载失败: {sorted(failed)}")

            # Merge the chunk files into the final file.
            self._merge_chunks(local_path, chunks)

            # Sanity-check the merged result.
            if os.path.getsize(local_path) != file_size:
                raise Exception("下载文件大小不匹配")

            # Success: drop the checkpoint and any leftover temp files.
            self._remove_download_status(local_path)
            for i in range(num_chunks):
                temp_file = f"{local_path}.part{i}"
                if os.path.exists(temp_file):
                    os.remove(temp_file)

            # Tear down the progress bar and the task entry.
            self.progress_bars[task_id].close()
            del self.progress_bars[task_id]
            del self.tasks[task_id]

            return True
        except Exception:
            # Persist progress so the download can resume, then release the
            # progress bar and task entry (the original leaked both here).
            self._save_current_status(task_id)
            bar = self.progress_bars.pop(task_id, None)
            if bar is not None:
                bar.close()
            self.tasks.pop(task_id, None)
            raise

    def download_file_old(self):
        """Deprecated placeholder — see ``download_file``."""
        raise NotImplementedError

    def _chunk_progress_callback(self, chunk_id, chunk_size):
        """Aggregate per-chunk byte counts into task-level progress.

        ``ChunkDownloader.download`` already updates its own ``downloaded``
        counter *before* invoking this callback, so the counter must NOT be
        incremented again here — doing so (as the previous version did)
        double-counted every byte in the checkpoint and external callbacks.

        NOTE(review): chunks are matched by ``chunk_id`` across *all* active
        tasks, so concurrent downloads would cross-update each other's
        progress — confirm only one task runs at a time, or key callbacks by
        task as well.
        """
        with self.lock:
            for task_id, task in self.tasks.items():
                if chunk_id < len(task['chunks']):
                    # Advance the progress bar by the newly received bytes.
                    if task_id in self.progress_bars:
                        self.progress_bars[task_id].update(chunk_size)

                    # Persist the checkpoint at most once per second.
                    now = time.time()
                    if now - self._last_status_save >= 1.0:
                        self._last_status_save = now
                        self._save_current_status(task_id)

                    # Forward cumulative progress to the external callback.
                    if task['progress_callback']:
                        total_downloaded = sum(c.downloaded for c in task['chunks'])
                        task['progress_callback'](total_downloaded, task['file_size'])

    def _save_current_status(self, task_id):
        """Write the current per-chunk progress of ``task_id`` to disk."""
        if task_id in self.tasks:
            task = self.tasks[task_id]
            status = {
                'remote_path': task['remote_path'],
                'file_size': task['file_size'],
                'chunks': [c.downloaded for c in task['chunks']]
            }
            self._save_download_status(task['local_path'], status)

    def _merge_chunks(self, local_path, chunks):
        """Concatenate the chunk temp files (in chunk order) into local_path.

        A missing temp file is silently skipped here; the size check in
        ``download_file`` catches the resulting short file.
        """
        print(f"🔄 合并分片文件: {local_path}")
        with open(local_path, 'wb') as outfile:
            for chunk in chunks:
                temp_file = f"{local_path}.part{chunk.chunk_id}"
                if os.path.exists(temp_file):
                    with open(temp_file, 'rb') as infile:
                        while True:
                            data = infile.read(8192)
                            if not data:
                                break
                            outfile.write(data)
                    # Temp file is consumed; remove it.
                    os.remove(temp_file)

    def resume_download(self, local_path):
        """Resume a download from its saved checkpoint.

        Raises:
            FileNotFoundError: if no checkpoint exists for ``local_path``.
        """
        status = self._load_download_status(local_path)
        if not status:
            raise FileNotFoundError("未找到下载状态")

        return self.download_file(status['remote_path'], local_path)

class UploadManager:
    """Placeholder manager for (future) multi-threaded uploads."""

    def __init__(self, commands: BaiduPanCommands, max_workers=4, chunk_size=4 * 1024 * 1024):
        """
        Set up the upload manager.

        Args:
            commands: BaiduPanCommands instance used for API calls.
            max_workers: upper bound on concurrent worker threads.
            chunk_size: size of one upload chunk, in bytes.
        """
        self.chunk_size = chunk_size
        self.max_workers = max_workers
        self.commands = commands

    def upload_file(self, local_path, remote_path, progress_callback=None):
        """Upload a local file to the pan (not implemented yet)."""
        # TODO: implement resumable, chunked uploads for large files.
        raise NotImplementedError("大文件上传功能尚未实现")
