import Bull, { Queue, Job } from 'bull';
import { QUEUE_NAMES } from '@weubi/shared';
import { CrawlerService } from '../services/CrawlerService';
import { logger } from '../utils/logger';

/**
 * Owns the Bull crawler queue: registers the job processor, wires up
 * lifecycle/event logging, and exposes maintenance helpers (stats,
 * cleanup, retry). One instance per process.
 */
export class QueueProcessor {
  private crawlerQueue: Queue;
  private isProcessing = false;

  /**
   * @param crawlerService Service that performs the actual crawl for a task.
   */
  constructor(private crawlerService: CrawlerService) {
    const redisConfig = {
      host: process.env.REDIS_HOST || 'localhost',
      port: parseInt(process.env.REDIS_PORT || '6379', 10),
      password: process.env.REDIS_PASSWORD || undefined,
    };

    this.crawlerQueue = new Bull(QUEUE_NAMES.CRAWLER, {
      redis: redisConfig,
      settings: {
        stalledInterval: 30 * 1000, // check for stalled jobs every 30s
        maxStalledCount: 1, // re-queue a stalled job at most once before failing it
      },
    });

    this.setupEventHandlers();
  }

  /**
   * Registers the 'crawl' job handler and begins consuming the queue.
   * Idempotent: calling start() twice is a no-op.
   */
  async start(): Promise<void> {
    if (this.isProcessing) return;

    // Number of jobs processed concurrently by this worker.
    const concurrency = parseInt(process.env.CRAWLER_CONCURRENCY || '5', 10);

    // Register the named job processor; bind so `this` survives Bull's dispatch.
    this.crawlerQueue.process('crawl', concurrency, this.processCrawlerJob.bind(this));

    this.isProcessing = true;
    logger.info(`✅ Queue processor started with concurrency: ${concurrency}`);
  }

  /**
   * Gracefully drains and closes the queue: stops accepting new jobs,
   * waits for in-flight jobs to finish, then releases Redis connections.
   * Idempotent: calling stop() when not running is a no-op.
   * @throws Re-throws any error from the underlying queue shutdown.
   */
  async stop(): Promise<void> {
    if (!this.isProcessing) return;

    try {
      // Pause the queue so no new jobs are picked up.
      await this.crawlerQueue.pause();

      // Let jobs that are already executing run to completion.
      await this.crawlerQueue.whenCurrentJobsFinished();

      // Tear down the queue and its Redis connections.
      await this.crawlerQueue.close();

      this.isProcessing = false;
      logger.info('✅ Queue processor stopped');
    } catch (error) {
      logger.error('❌ Error stopping queue processor:', error);
      throw error;
    }
  }

  /**
   * Handler for a single 'crawl' job. Delegates the real work to
   * CrawlerService and reports coarse progress on the job.
   * @param job Bull job whose data carries the crawler task id.
   * @returns Whatever the crawler service produces for the task.
   * @throws Re-throws crawl failures so Bull marks the job failed/retries it.
   */
  private async processCrawlerJob(job: Job): Promise<any> {
    // Only taskId is consumed here; the service resolves url/engine/config itself.
    const { taskId } = job.data;

    logger.info(`🔄 Processing crawler job ${job.id} for task ${taskId}`);

    try {
      // Signal that the job has been picked up.
      await job.progress(10);

      // Execute the crawl.
      const result = await this.crawlerService.executeCrawlerTask(taskId);

      // Mark the job as fully done.
      await job.progress(100);

      logger.info(`✅ Crawler job ${job.id} completed successfully`);
      return result;

    } catch (error) {
      logger.error(`❌ Crawler job ${job.id} failed:`, error);
      throw error;
    }
  }

  /** Wires structured logging onto every queue lifecycle event. */
  private setupEventHandlers(): void {
    // Job finished successfully.
    this.crawlerQueue.on('completed', (job: Job) => {
      logger.info(`✅ Job ${job.id} completed in ${Date.now() - job.timestamp}ms`);
    });

    // Job exhausted its attempts or threw.
    this.crawlerQueue.on('failed', (job: Job, err: Error) => {
      logger.error(`❌ Job ${job.id} failed after ${job.attemptsMade} attempts:`, err.message);
    });

    // Job lock expired (worker died or blocked); Bull will re-queue it.
    this.crawlerQueue.on('stalled', (job: Job) => {
      logger.warn(`⚠️ Job ${job.id} stalled and will be retried`);
    });

    // Progress updates emitted from processCrawlerJob.
    this.crawlerQueue.on('progress', (job: Job, progress: number) => {
      logger.debug(`📊 Job ${job.id} progress: ${progress}%`);
    });

    // Job picked up by a worker.
    this.crawlerQueue.on('active', (job: Job) => {
      logger.info(`🚀 Job ${job.id} started processing`);
    });

    // Job enqueued, waiting for a free worker.
    this.crawlerQueue.on('waiting', (jobId: string) => {
      logger.debug(`⏳ Job ${jobId} is waiting`);
    });

    // Queue-level error (e.g. Redis connection problems).
    this.crawlerQueue.on('error', (error: Error) => {
      logger.error('❌ Queue error:', error);
    });

    // Result of a clean() sweep.
    this.crawlerQueue.on('cleaned', (jobs: Job[], type: string) => {
      logger.info(`🧹 Cleaned ${jobs.length} ${type} jobs`);
    });
  }

  /**
   * Snapshot of queue counters.
   * Uses Bull's count endpoints instead of fetching full job lists
   * (the lists can be arbitrarily large), and issues all six
   * independent queries in parallel.
   */
  async getQueueStats(): Promise<{
    waiting: number;
    active: number;
    completed: number;
    failed: number;
    delayed: number;
    isPaused: boolean;
  }> {
    const [waiting, active, completed, failed, delayed, isPaused] = await Promise.all([
      this.crawlerQueue.getWaitingCount(),
      this.crawlerQueue.getActiveCount(),
      this.crawlerQueue.getCompletedCount(),
      this.crawlerQueue.getFailedCount(),
      this.crawlerQueue.getDelayedCount(),
      this.crawlerQueue.isPaused(),
    ]);

    return { waiting, active, completed, failed, delayed, isPaused };
  }

  /**
   * Best-effort maintenance sweep: drops completed jobs older than 24h
   * and failed jobs older than 7 days. Errors are logged, not thrown.
   */
  async cleanCompletedJobs(): Promise<void> {
    try {
      await this.crawlerQueue.clean(24 * 60 * 60 * 1000, 'completed'); // completed jobs older than 24h
      await this.crawlerQueue.clean(7 * 24 * 60 * 60 * 1000, 'failed'); // failed jobs older than 7 days
      logger.info('✅ Completed jobs cleaned');
    } catch (error) {
      logger.error('❌ Error cleaning completed jobs:', error);
    }
  }

  /**
   * Re-queues every currently failed job. Retries are independent, so
   * they are issued in parallel. Errors are logged, not thrown.
   */
  async retryFailedJobs(): Promise<void> {
    try {
      const failedJobs = await this.crawlerQueue.getFailed();
      await Promise.all(failedJobs.map((job) => job.retry()));
      logger.info(`✅ Retried ${failedJobs.length} failed jobs`);
    } catch (error) {
      logger.error('❌ Error retrying failed jobs:', error);
    }
  }
}
