#!/usr/bin/env python
# -*- coding: UTF-8 -*-
"""
@Project ：python常用模块库 
@File ：files_monitor.py
@IDE  ：PyCharm 
@Author ：李涵彬
@Date ：2025/2/21 上午9:47 
"""

"""
多目录文件监控系统
Version: 2.0
功能特性：
- 支持多个独立配置的监控目录
- 每个目录可设置不同参数
- 线程安全的清理操作
- 详细的目录级日志
- 动态配置加载支持
"""

import logging
import threading
import time
from datetime import datetime
from pathlib import Path
from typing import Dict, List

# #################### Configuration ####################
# Per-directory monitoring configuration.
# Required keys : "path", "max_files", "max_size_gb".
# Optional keys : "exclude"        - glob patterns to skip,
#                 "check_interval" - seconds between scans (default 300),
#                 "retention_days" - delete entries whose mtime is older
#                                    than this many days (omit = no age limit).
MONITOR_CONFIGS: List[Dict] = [
	{
		"path": "/var/log/app1",
		"max_files": 1000,
		"max_size_gb": 10.0,
		"exclude": ["*.tmp", "*.swap"],
		"check_interval": 300
	},
	{
		"path": "/data/images",
		"max_files": 5000,
		"max_size_gb": 50.0,
		"exclude": ["*.metadata"],
		"check_interval": 600,
		"retention_days": 30  # age-based retention policy
	}
]


# #################################################

class DirectoryMonitor:
	"""Monitors a single directory and enforces its cleanup policies.

	Policies (applied in _apply_policies, oldest entries removed first):
	- size cap  : total bytes should stay under max_size_gb
	- count cap : number of entries should stay under max_files
	- retention : entries whose mtime is older than retention_days are removed
	"""

	def __init__(self, config: Dict):
		"""Validate *config*, set up logging and verify the target directory.

		Raises:
			ValueError: a required config field is missing.
			FileNotFoundError / NotADirectoryError: bad target path.
		"""
		self.config = self._validate_config(config)
		self.lock = threading.RLock()  # serializes check_and_clean across threads
		self.logger = self._setup_logger()
		self.last_check = datetime.min  # datetime.min forces an immediate first check
		self._validate_directory()

	def _validate_config(self, config: Dict) -> Dict:
		"""Check required fields and fill in defaults for optional ones."""
		required = ['path', 'max_files', 'max_size_gb']
		for field in required:
			if field not in config:
				raise ValueError(f"Missing required config field: {field}")

		return {
			'path': Path(config['path']).resolve(),
			'max_files': config['max_files'],
			'max_size_gb': config['max_size_gb'],
			'exclude': config.get('exclude', []),
			'check_interval': config.get('check_interval', 300),
			'retention_days': config.get('retention_days', None)
		}

	def _setup_logger(self) -> logging.Logger:
		"""Create a logger dedicated to this directory (shared log file)."""
		logger = logging.getLogger(f"DirectoryMonitor::{self.config['path']}")
		logger.setLevel(logging.INFO)

		# Monitors may be re-created for the same path; attach a handler only once.
		if not logger.handlers:
			handler = logging.FileHandler('file_monitor.log')
			formatter = logging.Formatter(
				'%(asctime)s [%(levelname)s] %(name)s: %(message)s',
				datefmt='%Y-%m-%d %H:%M:%S'
			)
			handler.setFormatter(formatter)
			logger.addHandler(handler)

		return logger

	def _validate_directory(self):
		"""Fail fast if the configured path is missing or not a directory."""
		path = self.config['path']
		if not path.exists():
			raise FileNotFoundError(f"Directory not found: {path}")
		if not path.is_dir():
			raise NotADirectoryError(f"Path is not a directory: {path}")

	def should_check(self) -> bool:
		"""Return True once check_interval seconds have passed since last run."""
		elapsed = (datetime.now() - self.last_check).total_seconds()
		return elapsed >= self.config['check_interval']

	def check_and_clean(self):
		"""Scan the directory and apply all cleanup policies (thread-safe)."""
		with self.lock:
			try:
				# Stamp first so a slow or failing scan does not retry immediately.
				self.last_check = datetime.now()
				files = self._scan_files()
				self._apply_policies(files)
			except Exception as e:
				self.logger.error(f"检查失败: {str(e)}", exc_info=True)

	def _scan_files(self) -> List[Dict]:
		"""Recursively collect entry metadata, sorted oldest (by ctime) first.

		NOTE(review): directories are included as single entries, so they
		count toward the max_files / size totals — confirm this is intended.
		"""
		files = []
		for entry in self.config['path'].rglob('*'):
			if self._should_skip(entry):
				continue

			try:
				stat = entry.stat()
				files.append({
					'path': entry,
					'size': stat.st_size,
					'ctime': stat.st_ctime,
					'mtime': stat.st_mtime,
					'is_dir': entry.is_dir()
				})
			except Exception as e:
				# Entry may vanish between rglob() and stat(); keep scanning.
				self.logger.warning(f"扫描文件失败: {entry} - {str(e)}")

		return sorted(files, key=lambda x: x['ctime'])

	def _should_skip(self, path: Path) -> bool:
		"""Skip vanished entries, symlinks and excluded glob patterns."""
		if not path.exists() or path.is_symlink():
			return True
		if any(path.match(p) for p in self.config['exclude']):
			return True
		return False

	def _apply_policies(self, files: List[Dict]):
		"""Select deletion candidates from every policy, dedupe, then delete."""
		candidates = []

		# Size cap: drop the oldest 20% of entries when over budget.
		total_size = sum(f['size'] for f in files)
		if total_size > self.config['max_size_gb'] * 1024 ** 3:
			candidates += files[:int(len(files) * 0.2)]

		# Count cap: drop exactly the oldest surplus entries.
		if len(files) > self.config['max_files']:
			candidates += files[:len(files) - self.config['max_files']]

		# Retention: drop anything whose mtime is older than the cutoff.
		if self.config['retention_days']:
			cutoff = datetime.now().timestamp() - self.config['retention_days'] * 86400
			candidates += [f for f in files if f['mtime'] < cutoff]

		# The same entry can be picked by several policies; delete it once.
		unique_candidates = {f['path']: f for f in candidates}.values()
		self._perform_cleanup(list(unique_candidates))

	def _perform_cleanup(self, candidates: List[Dict]):
		"""Delete every candidate and log an aggregate summary.

		Bug fix: the previous early-exit compared the *candidate subset*
		(not the whole directory) against max_files / max_size_gb. Since
		candidates are a small subset, that condition was satisfied after
		the first iteration, so cleanup removed at most one entry per run.
		The policies have already decided what must go — remove it all.
		"""
		deleted = 0
		freed = 0

		for file_info in candidates:
			if self._safe_delete(file_info):
				deleted += 1
				freed += file_info['size']

		if deleted > 0:
			self.logger.warning(
				f"清理完成: 删除 {deleted} 个项目, "
				f"释放 {freed / 1024 ** 3:.2f}GB 空间"
			)

	def _safe_delete(self, file_info: Dict) -> bool:
		"""Delete one entry; return True on success, log and swallow errors."""
		path = file_info['path']
		try:
			if file_info['is_dir']:
				self._delete_directory(path)
			else:
				path.unlink(missing_ok=True)

			self.logger.info(
				f"已删除: {path.name} (创建于: {self._format_time(file_info['ctime'])})"
			)
			return True
		except Exception as e:
			self.logger.error(f"删除失败 {path}: {str(e)}")
			return False

	def _delete_directory(self, path: Path):
		"""Recursively delete *path* and everything beneath it."""
		for item in path.iterdir():
			if item.is_dir():
				self._delete_directory(item)
			else:
				item.unlink(missing_ok=True)
		path.rmdir()

	@staticmethod
	def _format_time(timestamp: float) -> str:
		"""Render a POSIX timestamp as local 'YYYY-MM-DD HH:MM:SS'."""
		return datetime.fromtimestamp(timestamp).strftime('%Y-%m-%d %H:%M:%S')


class MultiDirectoryMonitor:
	"""Coordinates one worker thread per configured directory."""

	def __init__(self, configs: List[Dict]):
		# One DirectoryMonitor per config entry; invalid configs raise here.
		self.monitors = [DirectoryMonitor(cfg) for cfg in configs]
		self._running = False
		self._threads: List[threading.Thread] = []

	def start(self):
		"""Start one daemon thread per monitor and block until stopped.

		Bug fix: each thread now receives its monitor explicitly via args=.
		The old code recovered the monitor inside the loop by matching the
		thread-name suffix against path.name, which bound the wrong (or the
		same) monitor whenever two configured paths shared a final path
		component, silently leaving a directory unmonitored.
		"""
		self._running = True
		self._threads = [
			threading.Thread(
				target=self._monitor_loop,
				args=(monitor,),
				name=f"MonitorThread-{monitor.config['path'].name}",
				daemon=True,
			)
			for monitor in self.monitors
		]
		for thread in self._threads:
			thread.start()

		logging.info(f"启动 {len(self.monitors)} 个目录监控")
		try:
			# Keep the main thread alive; Ctrl-C triggers a clean shutdown.
			while self._running:
				time.sleep(1)
		except KeyboardInterrupt:
			self.stop()

	def stop(self):
		"""Signal all worker threads to exit and wait for them (30s cap each)."""
		self._running = False
		for thread in self._threads:
			thread.join(timeout=30)
		logging.info("监控服务已停止")

	def _monitor_loop(self, monitor: DirectoryMonitor):
		"""Polling loop for a single directory monitor."""
		while self._running:
			try:
				if monitor.should_check():
					monitor.check_and_clean()
				time.sleep(1)
			except Exception as e:
				# Keep the thread alive; back off before retrying.
				logging.error(f"监控线程异常: {str(e)}", exc_info=True)
				time.sleep(10)


if __name__ == "__main__":
	# Root logger: mirror every record to the shared log file and the console.
	logging.basicConfig(
		level=logging.INFO,
		format='%(asctime)s [%(levelname)s] %(message)s',
		handlers=[
			logging.FileHandler('file_monitor.log'),
			logging.StreamHandler(),
		],
	)

	try:
		# Build the manager from the static config and block in its run loop.
		MultiDirectoryMonitor(MONITOR_CONFIGS).start()
	except Exception as e:
		logging.critical(f"启动失败: {str(e)}", exc_info=True)
