# -*- coding: utf-8 -*-
"""
-------------------------------------------------
   File Name：     proxyScheduler
   Description :
   Author :        JHao
   date：          2019/8/5
-------------------------------------------------
   Change Activity:
                   2019/08/05: proxyScheduler
                   2021/02/23: runProxyCheck时,剩余代理少于POOL_SIZE_MIN时执行抓取
-------------------------------------------------
"""
__author__ = 'JHao'

import threading
import json
import pytz
import redis
import signal
import sys
import time
from datetime import datetime, timedelta
from apscheduler.schedulers.background import BackgroundScheduler
from apscheduler.executors.pool import ProcessPoolExecutor
from util.six import Queue
from helper.fetch import Fetcher
from helper.check import Checker
from handler.logHandler import LogHandler
from handler.proxyHandler import ProxyHandler
from handler.configHandler import ConfigHandler


class ProxyScheduler:
    """Redis-coordinated proxy-pool scheduler.

    Wraps an APScheduler ``BackgroundScheduler`` that runs four periodic
    jobs (proxy fetch, proxy check, fetch-interval tuning, heartbeat
    check).  The running/stopped state and the heartbeat timestamp are
    kept in Redis so that other processes (e.g. the web API) can observe
    the scheduler and wake it up by calling :meth:`update_heartbeat`.
    When no heartbeat arrives for ``idle_threshold`` the scheduler shuts
    itself down to save resources.
    """

    def __init__(self):
        self.config = ConfigHandler()
        self.timezone = pytz.timezone(self.config.timezone)
        self.scheduler_log = LogHandler("scheduler")

        # Background (non-blocking) scheduler instead of a blocking one,
        # so the main thread stays free (see run()).
        self.scheduler = BackgroundScheduler(
            logger=self.scheduler_log,
            timezone=self.timezone
        )

        # Redis connection used for heartbeat/status bookkeeping.
        self.db = redis.from_url(self.config.dbConn)

        # Redis keys used for cross-process coordination.
        self.HEARTBEAT_KEY = "proxy_pool:heartbeat"
        self.SCHEDULER_STATUS_KEY = "proxy_pool:scheduler_status"
        self.LAST_CHECK_TIME_KEY = "proxy_pool:last_check_time"

        # Serializes start_scheduler/stop_scheduler, which may be invoked
        # from worker threads spawned in update_heartbeat/check_heartbeat.
        self.lock = threading.Lock()
        # Idle period (minutes, from config) after which the scheduler stops.
        self.idle_threshold = timedelta(minutes=self.config.heartbeatInterval)
        self.job_ids = [
            "proxy_fetch",
            "proxy_check",
            "update_fetch_interval",
            "heartbeat_check"
        ]

        # NOTE(review): signal handlers can only be registered from the main
        # thread; constructing this class anywhere else raises ValueError.
        # Registration also happens as an import side effect via the
        # module-level instance below — confirm that is intended.
        signal.signal(signal.SIGINT, self.signal_handler)
        signal.signal(signal.SIGTERM, self.signal_handler)

    def signal_handler(self, sig, frame):
        """Handle SIGINT/SIGTERM: shut everything down and exit."""
        self.scheduler_log.info(f"收到信号 {sig}, 正在优雅关闭...")
        self.cleanup()
        sys.exit(0)

    def cleanup(self):
        """Best-effort resource cleanup; errors are deliberately ignored."""
        try:
            self.stop_scheduler(force=True)
        except Exception:
            # Narrowed from a bare ``except:`` so SystemExit /
            # KeyboardInterrupt are no longer swallowed here.
            pass

    def update_heartbeat(self):
        """Record the current time as the last-request heartbeat in Redis.

        If the scheduler is not running, it is started in a background
        thread (start is lock-protected and idempotent).

        :return: True on success, False if Redis access failed.
        """
        try:
            # NOTE(review): timestamps are naive local time; consistent with
            # check_heartbeat(), but consider tz-aware datetimes if the
            # process timezone can differ from self.timezone.
            current_time = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
            heartbeat_data = {
                "last_request_time": current_time,
                "timezone": str(self.timezone)
            }
            self.db.set(self.HEARTBEAT_KEY, json.dumps(heartbeat_data))

            if not self.is_scheduler_running():
                # Start asynchronously so the caller (e.g. a web request)
                # is not blocked by job setup.
                threading.Thread(target=self.start_scheduler).start()
                self.scheduler_log.info("检测到新请求，启动调度任务")
            return True
        except Exception as e:
            self.scheduler_log.error(f"更新心跳出错: {str(e)}")
            return False

    def is_scheduler_running(self):
        """Return True iff the Redis status key reads "running"."""
        try:
            status = self.db.get(self.SCHEDULER_STATUS_KEY)
            if status:
                # redis-py returns bytes unless decode_responses is set.
                status = status.decode() if isinstance(status, bytes) else status
                return status == "running"
            return False
        except Exception as e:
            self.scheduler_log.error(f"获取调度器状态出错: {str(e)}")
            return False

    def check_heartbeat(self):
        """Stop the scheduler when the heartbeat is older than the idle threshold."""
        try:
            heartbeat_data = self.db.get(self.HEARTBEAT_KEY)
            if not heartbeat_data:
                return

            if isinstance(heartbeat_data, bytes):
                heartbeat_data = heartbeat_data.decode()

            heartbeat = json.loads(heartbeat_data)
            # Naive datetimes on both sides — consistent with update_heartbeat().
            last_time = datetime.strptime(heartbeat["last_request_time"], "%Y-%m-%d %H:%M:%S")
            current_time = datetime.now()
            idle_time = current_time - last_time

            self.scheduler_log.info(f"当前空闲时间: {idle_time.total_seconds()/60:.1f}分钟")

            if idle_time > self.idle_threshold:
                self.scheduler_log.info(
                    f"空闲超过{self.idle_threshold.total_seconds()/60}分钟，停止调度任务")
                # Stop in a separate thread: this method runs inside a
                # scheduler job, and shutting the scheduler down from its
                # own executor thread would deadlock.
                threading.Thread(target=self.stop_scheduler).start()

        except Exception as e:
            self.scheduler_log.error(f"检查心跳出错: {str(e)}")

    def start_scheduler(self):
        """Create a fresh BackgroundScheduler, register all jobs and start it.

        Idempotent: returns immediately if Redis already reports "running".
        On failure, the status key is reset to "stopped" and the error re-raised.
        """
        with self.lock:
            if self.is_scheduler_running():
                return

            try:
                # Publish the state first so concurrent callers bail out early.
                self.db.set(self.SCHEDULER_STATUS_KEY, "running")

                # A shut-down APScheduler instance cannot be restarted,
                # so always build a fresh one.
                if hasattr(self.scheduler, 'running') and self.scheduler.running:
                    self.scheduler.shutdown(wait=False)

                self.scheduler = BackgroundScheduler(
                    logger=self.scheduler_log,
                    timezone=self.timezone
                )

                # Executors: thread pool by default, process pool available
                # for CPU-bound jobs.
                executors = {
                    'default': {'type': 'threadpool', 'max_workers': 20},
                    'processpool': ProcessPoolExecutor(max_workers=5)
                }
                job_defaults = {
                    'coalesce': False,
                    'max_instances': 10
                }

                self.scheduler.configure(executors=executors, job_defaults=job_defaults)

                # Prime the pool once, synchronously, before the timers kick in.
                self.__runProxyFetch()

                # All periodic jobs; ids must match self.job_ids.
                jobs = [
                    {
                        'func': self.__runProxyFetch,
                        'trigger': 'interval',
                        'seconds': 30,
                        'id': 'proxy_fetch',
                        'name': 'proxy采集'
                    },
                    {
                        'func': self.__runProxyCheck,
                        'trigger': 'interval',
                        'minutes': 5,
                        'id': 'proxy_check',
                        'name': 'proxy检查'
                    },
                    {
                        'func': self.update_fetch_job,
                        'trigger': 'interval',
                        'seconds': 30,
                        'id': 'update_fetch_interval',
                        'name': '更新采集频率'
                    },
                    {
                        'func': self.check_heartbeat,
                        'trigger': 'interval',
                        'seconds': 60,
                        'id': 'heartbeat_check',
                        'name': '心跳检查'
                    }
                ]

                for job in jobs:
                    self.scheduler.add_job(**job)

                # Non-blocking start: jobs run on background threads.
                self.scheduler.start()
                self.scheduler_log.info("所有调度任务已启动")

            except Exception as e:
                # Roll the published state back so a later call can retry.
                self.db.set(self.SCHEDULER_STATUS_KEY, "stopped")
                self.scheduler_log.error(f"启动调度器出错: {str(e)}")
                raise

    def stop_scheduler(self, force=False):
        """Stop the scheduler and clear the heartbeat key.

        :param force: when True, shut down even if Redis already says "stopped"
                      (used during cleanup to guarantee teardown).
        """
        with self.lock:
            if not force and not self.is_scheduler_running():
                return

            try:
                # Publish "stopped" first so no new start races with teardown.
                self.db.set(self.SCHEDULER_STATUS_KEY, "stopped")

                if hasattr(self.scheduler, 'running') and self.scheduler.running:
                    try:
                        # Pause each job first; a job may already be gone,
                        # which is fine.
                        for job_id in self.job_ids:
                            try:
                                self.scheduler.pause_job(job_id)
                            except Exception:
                                # Narrowed from a bare ``except:``.
                                pass

                        self.scheduler.remove_all_jobs()

                        # wait=False: don't block on in-flight job executions.
                        self.scheduler.shutdown(wait=False)
                        self.scheduler_log.info("调度器已关闭")
                    except Exception as e:
                        self.scheduler_log.error(f"关闭调度器出错: {str(e)}")

                # Drop the heartbeat so the next request starts fresh.
                self.db.delete(self.HEARTBEAT_KEY)

            except Exception as e:
                self.scheduler_log.error(f"停止调度器出错: {str(e)}")
            finally:
                self.scheduler_log.info("调度任务已完全停止")

    def __runProxyFetch(self):
        """Fetch new proxies and push them through the "raw" checker.

        Skips fetching entirely while the pool already holds at least
        ``poolSizeMin`` proxies.
        """
        proxy_queue = Queue()
        proxy_handler = ProxyHandler()
        proxy_fetcher = Fetcher()

        if proxy_handler.db.getCount().get("total", 0) >= proxy_handler.conf.poolSizeMin:
            return

        for proxy in proxy_fetcher.run():
            proxy_queue.put(proxy)

        Checker("raw", proxy_queue)

    def __runProxyCheck(self):
        """Re-validate every stored proxy; fetch first if the pool is low."""
        proxy_handler = ProxyHandler()
        proxy_queue = Queue()

        # Top up before checking when below the configured minimum.
        if proxy_handler.db.getCount().get("total", 0) < proxy_handler.conf.poolSizeMin:
            self.__runProxyFetch()

        for proxy in proxy_handler.getAll():
            proxy_queue.put(proxy)

        Checker("use", proxy_queue)
        last_check_time = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
        # Persist the completion time for external monitoring.
        self.db.set(self.LAST_CHECK_TIME_KEY, last_check_time)
        self.scheduler_log.info(f"更新最终检查时间: {last_check_time}")

    def update_fetch_job(self):
        """Re-schedule the fetch job when its desired interval changed."""
        try:
            interval = self.adjust_fetch_interval()
            current_job = self.scheduler.get_job('proxy_fetch')

            # total_seconds() instead of .seconds: .seconds is only the
            # sub-day component and would mis-compare intervals >= 1 day.
            if not current_job or current_job.trigger.interval.total_seconds() != interval:
                self.scheduler.add_job(
                    self.__runProxyFetch,
                    'interval',
                    seconds=interval,
                    id="proxy_fetch",
                    name="proxy采集",
                    replace_existing=True
                )
                self.scheduler_log.info(f"更新采集任务间隔: {interval}秒")
        except Exception as e:
            self.scheduler_log.error(f"更新采集任务出错: {str(e)}")

    def adjust_fetch_interval(self):
        """Pick a fetch interval (seconds) from the pool's fill level.

        < 50% of poolSizeMin -> 2s, < 80% -> 30s, otherwise 60s.
        A zero poolSizeMin falls back to the default 60s (avoids /0).

        :return: interval in seconds (int).
        """
        proxy_handler = ProxyHandler()
        conf = ConfigHandler()

        current_count = proxy_handler.db.getCount().get("total", 0)
        min_size = conf.poolSizeMin

        if min_size == 0:
            self.scheduler_log.warning("最小池容量配置为0，使用默认采集间隔")
            return 60

        capacity_percentage = (current_count / min_size) * 100

        if capacity_percentage < 50:
            interval = 2
        elif capacity_percentage < 80:
            interval = 30
        else:
            interval = 60

        self.scheduler_log.info(f"当前代理数量: {current_count}, 最小要求: {min_size}, "
                               f"容量百分比: {capacity_percentage:.1f}%, 调整间隔为: {interval}秒")
        return interval

    def run(self):
        """Entry point: reset state, start the scheduler and park the main thread."""
        try:
            # Clear any stale "running" state left by a previous process.
            self.db.set(self.SCHEDULER_STATUS_KEY, "stopped")

            self.start_scheduler()

            # BackgroundScheduler runs on daemon threads; keep the process alive.
            while True:
                time.sleep(1)
        except KeyboardInterrupt:
            self.scheduler_log.info("收到终止信号")
            self.cleanup()
        except Exception as e:
            self.scheduler_log.error(f"运行调度器出错: {str(e)}")
            self.cleanup()
            raise

# Create the global scheduler instance.
# NOTE(review): constructing it at import time connects to Redis and
# registers SIGINT/SIGTERM handlers as a side effect of merely importing
# this module (and requires the import to happen on the main thread).
# Confirm importers depend on this eager instance before making it lazy.
proxy_scheduler = ProxyScheduler()

if __name__ == '__main__':
    # Run standalone: blocks the main thread while background jobs run.
    proxy_scheduler.run()
