import os
import time
import yaml
import requests
import logging
import sched
import urllib3
from logging.handlers import RotatingFileHandler
from urllib.parse import urljoin
from requests.adapters import HTTPAdapter
from urllib3.util.retry import Retry

class ClientConfig:
    """Loads and validates client/server settings from a YAML config file.

    Exposes: scan_interval, base_dir, log_file, server_url, timeout,
    max_retries, log_level, log_max_size, log_backup_count.
    Raises ValueError from _validate() on an unusable configuration.
    """

    def __init__(self, config_path):
        with open(config_path) as fh:
            cfg = yaml.safe_load(fh)

        client_cfg = cfg['client']
        server_cfg = cfg['server']

        self.scan_interval = client_cfg['scan_interval']
        self.base_dir = os.path.abspath(client_cfg['base_dir'])
        self.log_file = client_cfg['log_file']
        self.server_url = server_cfg['url']  # base URL of the file server
        self.timeout = server_cfg.get('timeout', 15)
        self.max_retries = server_cfg.get('max_retries', 3)

        # Logging section is optional; every key has a default.
        log_cfg = cfg.get('logging', {})
        self.log_level = log_cfg.get('level', 'INFO').upper()
        self.log_max_size = log_cfg.get('max_size', 10 * 1024 * 1024)  # 10 MB default
        self.log_backup_count = log_cfg.get('backup_count', 5)

        self._validate()

    def _validate(self):
        """Reject a missing working directory or a non-HTTP(S) server URL."""
        if not os.path.isdir(self.base_dir):
            raise ValueError(f"无效的工作目录: {self.base_dir}")
        if not self.server_url.startswith(('http://', 'https://')):
            raise ValueError("服务器地址必须以 http:// 或 https:// 开头")

def setup_logging(log_file, level=logging.INFO, max_size=10*1024*1024, backup_count=5):
    """Attach a size-rotating UTF-8 file handler to the root logger.

    log_file      -- path of the log file to write
    level         -- root logger level (default INFO)
    max_size      -- rotate once the file exceeds this many bytes
    backup_count  -- number of rotated files to keep
    """
    root = logging.getLogger()
    root.setLevel(level)

    file_handler = RotatingFileHandler(
        log_file,
        maxBytes=max_size,
        backupCount=backup_count,
        encoding='utf-8',
    )
    file_handler.setFormatter(
        logging.Formatter('%(asctime)s [%(levelname)s] %(message)s')
    )
    root.addHandler(file_handler)

class FileDownloader:
    """Downloads replacement files from the server and atomically swaps them
    into place under base_dir."""

    def __init__(self, base_dir, server_url, timeout=15, max_retries=3, backoff_factor=3):
        self.base_dir = base_dir
        self.server_url = server_url
        self.timeout = timeout
        self.session = self._create_session(timeout, max_retries, backoff_factor)

    def _create_session(self, timeout, max_retries, backoff_factor):
        """Create the HTTP session used for all downloads.

        NOTE: automatic urllib3 Retry was deliberately disabled (it put too
        much load on the server); max_retries/backoff_factor are kept in the
        signature for interface compatibility and a possible re-enable.
        """
        return requests.Session()

    def download_file(self, filename):
        """Download *filename* (path relative to the server base URL) and
        replace the local copy under base_dir.

        A 404 means the file was removed server-side, so the local copy is
        deleted to match. Returns True on success (including the 404/delete
        case), False on any failure.
        """
        # BUG FIX: the URL was previously built from a constant and ignored
        # `filename`; join the relative name onto the server base URL.
        file_url = urljoin(self.server_url, filename)
        save_path = os.path.join(self.base_dir, filename)
        temp_path = f"{save_path}.tmp"

        try:
            with self.session.get(file_url, stream=True, timeout=self.timeout) as r:
                if r.status_code == 404:
                    # Mirror the server-side deletion; guard against a race
                    # with the periodic cleanup that may have removed it.
                    if os.path.exists(save_path):
                        os.remove(save_path)
                    logging.warning(f"已删除文件: {filename}")
                    return True

                r.raise_for_status()

                # Stream into a temp file so a partial download never
                # clobbers the real file.
                with open(temp_path, 'wb') as f:
                    for chunk in r.iter_content(chunk_size=8192):
                        if chunk:
                            f.write(chunk)

                # Sanity-check the download before swapping it in.
                if not self._validate_file(temp_path):
                    raise ValueError("下载文件验证失败")

                # os.replace is atomic on the same filesystem.
                os.replace(temp_path, save_path)
                logging.info(f"文件更新成功: {filename}")
                return True

        except Exception as e:
            logging.error(f"文件下载失败 {filename}: {str(e)}")
            if os.path.exists(temp_path):
                os.remove(temp_path)
            return False

    def _validate_file(self, file_path):
        """Basic validation of a downloaded file.

        Raises ValueError if the file is empty; returns True otherwise.
        """
        if os.path.getsize(file_path) == 0:
            raise ValueError("下载文件大小为0")

        return True

class FileScanner:
    """Scans a directory tree for stale zero-byte files."""

    def __init__(self, base_dir, time_window=24, time_s_window=25):
        # time_window is in hours, time_s_window in minutes.
        self.base_dir = base_dir
        self.time_window = time_window * 3600    # lookback window, seconds
        self.time_s_window = time_s_window * 60  # recent grace period, seconds

    def find_zero_kb_files(self):
        """Return base_dir-relative paths of 0-byte files whose mtime falls
        within the last `time_window` but is older than `time_s_window`
        (skipping files still being written)."""
        now = time.time()
        oldest_allowed = now - self.time_window
        newest_allowed = now - self.time_s_window
        matches = []

        for dirpath, _, names in os.walk(self.base_dir):
            for name in names:
                full_path = os.path.join(dirpath, name)
                try:
                    # Files may be removed by periodic cleanup mid-scan.
                    if not os.path.exists(full_path):
                        continue

                    info = os.stat(full_path)
                    if info.st_size == 0 and oldest_allowed < info.st_mtime < newest_allowed:
                        matches.append(os.path.relpath(full_path, self.base_dir))
                except Exception as e:
                    logging.error(f"文件扫描错误: {str(e)}")

        return matches

def main():
    """Entry point: load config, set up logging, then run the periodic
    scan-and-download loop until interrupted."""
    try:
        config = ClientConfig("config.yaml")
    except Exception as e:
        # Logging is not configured yet, so report to stdout and bail.
        print(f"配置加载失败: {str(e)}")
        return

    setup_logging(
        log_file=config.log_file,
        level=getattr(logging, config.log_level, logging.INFO),
        max_size=config.log_max_size,
        backup_count=config.log_backup_count
    )

    scanner = FileScanner(config.base_dir)
    downloader = FileDownloader(config.base_dir, config.server_url,
                              config.timeout, config.max_retries)
    scheduler = sched.scheduler(time.time, time.sleep)

    def scan_and_download():
        """One scan pass; always reschedules itself in `finally`."""
        try:
            pending = scanner.find_zero_kb_files()
            if pending:
                logging.info(f"----------")
                logging.info(f"发现{len(pending)}个待处理文件")

                # Download each stale file; count the ones that succeed.
                ok_count = sum(
                    1 for name in pending if downloader.download_file(name)
                )
                logging.info(f"处理完成: 成功{ok_count}个, 失败{len(pending)-ok_count}个")

        except Exception as e:
            logging.error(f"扫描下载异常: {str(e)}")
        finally:
            scheduler.enter(config.scan_interval, 1, scan_and_download)

    # First pass runs immediately; later passes follow scan_interval.
    scheduler.enter(0, 1, scan_and_download)
    try:
        logging.info(f"执行定时扫描...")
        scheduler.run()
    except KeyboardInterrupt:
        logging.info("程序已安全终止")

# Script entry point: run the scan/download client when executed directly.
if __name__ == '__main__':
    main()