import {
  Injectable,
  BadRequestException,
  Logger,
  Inject,
} from '@nestjs/common';
import { InjectRepository } from '@nestjs/typeorm';
import { Repository } from 'typeorm';
import { CACHE_MANAGER } from '@nestjs/cache-manager';
import type { Cache } from 'cache-manager';
import { Novel } from '../novel/novel.entity';
import { Chapter } from '../novel/chapter.entity';
import { QidianCrawler } from './qidian.crawler';
import { FanqieCrawler } from './fanqie.crawler';
import { ICrawler, NovelInfo } from './interfaces/crawler.interface';

/**
 * Orchestrates crawling of novels and their chapters from the supported
 * sources (Qidian, Fanqie). Novel/chapter records are persisted through
 * TypeORM repositories; novel metadata and chapter contents are cached
 * via the injected cache manager.
 */
@Injectable()
export class CrawlerService {
  private readonly logger = new Logger(CrawlerService.name);
  // Registry mapping a source key (matches Novel.source) to its crawler.
  private readonly crawlers: Map<string, ICrawler> = new Map();

  constructor(
    @InjectRepository(Novel)
    private readonly novelRepository: Repository<Novel>,
    @InjectRepository(Chapter)
    private readonly chapterRepository: Repository<Chapter>,
    @Inject(CACHE_MANAGER)
    private readonly cacheManager: Cache,
    private readonly qidianCrawler: QidianCrawler,
    private readonly fanqieCrawler: FanqieCrawler,
  ) {
    this.crawlers.set('qidian', this.qidianCrawler);
    this.crawlers.set('fanqie', this.fanqieCrawler);
  }

  /**
   * Returns the novel for the given source URL, in priority order:
   * cache hit, existing database record, or a fresh crawl. A fresh crawl
   * also kicks off asynchronous crawling of the first 10 chapters.
   *
   * @param url source page URL (Qidian or Fanqie)
   * @returns the persisted Novel entity
   * @throws BadRequestException when no registered crawler accepts the URL
   */
  async crawlNovelFromUrl(url: string): Promise<Novel> {
    const cacheKey = `novel:${url}`;
    // NOTE(review): the TTL numbers below (3600/7200) are seconds under
    // cache-manager v4 but *milliseconds* under v5 (i.e. 3.6s/7.2s) —
    // confirm the installed version and scale by 1000 if v5 is in use.
    const cachedNovel = await this.cacheManager.get<Novel>(cacheKey);
    if (cachedNovel) {
      this.logger.log(`从缓存获取小说: ${cachedNovel.title}`);
      return cachedNovel;
    }

    const crawler = this.getCrawlerByUrl(url);
    if (!crawler) {
      throw new BadRequestException('不支持的URL，目前仅支持起点和番茄小说');
    }

    // Reuse an already-persisted novel instead of re-crawling it.
    const existingNovel = await this.novelRepository.findOne({
      where: { sourceUrl: url },
    });
    if (existingNovel) {
      this.logger.log(`小说已存在: ${existingNovel.title}`);
      await this.cacheManager.set(cacheKey, existingNovel, 3600);
      return existingNovel;
    }

    this.logger.log(`开始爬取小说: ${url}`);
    const novelInfo = await crawler.getNovelInfo(url);

    const novel = this.novelRepository.create({
      title: novelInfo.title,
      author: novelInfo.author,
      cover: novelInfo.cover,
      description: novelInfo.description,
      category: novelInfo.category,
      wordCount: novelInfo.wordCount,
      status: novelInfo.status,
      source: crawler.getSource(),
      sourceUrl: url,
      chapterCount: novelInfo.chapterList.length,
    });

    const savedNovel = await this.novelRepository.save(novel);
    this.logger.log(`小说创建成功: ${savedNovel.title}`);

    await this.cacheManager.set(cacheKey, savedNovel, 3600);

    // Fire-and-forget crawl of the first 10 chapters. The catch handler is
    // required: without it a rejection inside the background crawl would
    // surface as an unhandled promise rejection.
    void this.crawlChaptersAsync(
      savedNovel.id,
      novelInfo.chapterList.slice(0, 10),
    ).catch((error: unknown) =>
      this.logger.error(`异步爬取章节失败: ${url}`, error),
    );

    return savedNovel;
  }

  /**
   * Sequentially crawls the given chapters for a novel, throttled by the
   * CRAWLER_DELAY environment variable (ms, default 2000) between requests.
   * Already-stored chapters are skipped, chapter contents are cached, and
   * per-chapter failures are logged without aborting the run. Finishes by
   * syncing the novel's denormalized chapterCount.
   */
  private async crawlChaptersAsync(
    novelId: number,
    chapterList: Array<{ index: number; title: string; url: string }>,
  ): Promise<void> {
    this.logger.log(`开始爬取 ${chapterList.length} 个章节`);

    // Resolve the novel to determine which crawler to use.
    const novel = await this.novelRepository.findOne({
      where: { id: novelId },
    });
    if (!novel) {
      this.logger.error(`小说不存在: ${novelId}`);
      return;
    }
    const crawler = this.crawlers.get(novel.source);
    if (!crawler) {
      this.logger.error(`找不到爬虫: ${novel.source}`);
      return;
    }

    // Parse the throttle once (loop-invariant); the trailing `|| 2000`
    // covers a malformed value that parses to NaN or 0.
    const delayMs =
      Number.parseInt(process.env.CRAWLER_DELAY ?? '2000', 10) || 2000;

    for (const chapterInfo of chapterList) {
      try {
        await this.delay(delayMs);

        // Skip chapters that were already persisted.
        const existingChapter = await this.chapterRepository.findOne({
          where: { novelId, chapterIndex: chapterInfo.index },
        });
        if (existingChapter) {
          this.logger.log(`章节已存在，跳过: ${chapterInfo.title}`);
          continue;
        }

        // Prefer cached chapter content over hitting the source again.
        const chapterCacheKey = `chapter:${novelId}:${chapterInfo.index}`;
        let content = await this.cacheManager.get<{
          title: string;
          content: string;
        }>(chapterCacheKey);
        if (content) {
          this.logger.log(`从缓存获取章节: ${chapterInfo.title}`);
        } else {
          content = await crawler.getChapterContent(chapterInfo.url);
          // TTL unit caveat: see NOTE(review) in crawlNovelFromUrl.
          await this.cacheManager.set(chapterCacheKey, content, 7200);
        }

        const chapter = this.chapterRepository.create({
          title: content.title,
          content: content.content,
          chapterIndex: chapterInfo.index,
          novelId,
          wordCount: content.content.length,
        });

        await this.chapterRepository.save(chapter);
        this.logger.log(`章节保存成功: ${content.title}`);
      } catch (error) {
        // Log and move on: one bad chapter must not abort the whole crawl.
        this.logger.error(`爬取章节失败: ${chapterInfo.title}`, error);
      }
    }

    // Keep the denormalized chapter count in sync with what was saved.
    await this.novelRepository.update(novelId, {
      chapterCount: await this.chapterRepository.count({ where: { novelId } }),
    });
  }

  /** Returns the first registered crawler that accepts the URL, or null. */
  private getCrawlerByUrl(url: string): ICrawler | null {
    for (const crawler of this.crawlers.values()) {
      if (crawler.validateUrl(url)) {
        return crawler;
      }
    }
    return null;
  }

  /** Resolves after `ms` milliseconds. */
  private delay(ms: number): Promise<void> {
    return new Promise((resolve) => setTimeout(resolve, ms));
  }

  /**
   * Manually resumes chapter crawling for an existing novel: re-fetches the
   * source's chapter list and crawls only chapters whose index is greater
   * than the highest chapterIndex already stored.
   *
   * @param novelId id of a previously crawled novel
   * @throws BadRequestException when the novel or its crawler is missing
   */
  async continueCrawlChapters(novelId: number): Promise<void> {
    const novel = await this.novelRepository.findOne({
      where: { id: novelId },
    });
    if (!novel) {
      throw new BadRequestException('小说不存在');
    }

    const crawler = this.crawlers.get(novel.source);
    if (!crawler) {
      throw new BadRequestException('找不到对应的爬虫');
    }

    const novelInfo = await crawler.getNovelInfo(novel.sourceUrl);

    // Highest chapter index already stored (0 when none exist yet).
    const [latestChapter] = await this.chapterRepository.find({
      where: { novelId },
      order: { chapterIndex: 'DESC' },
      take: 1,
    });
    const lastIndex = latestChapter ? latestChapter.chapterIndex : 0;

    const newChapters = novelInfo.chapterList.filter(
      (chapter) => chapter.index > lastIndex,
    );

    if (newChapters.length === 0) {
      this.logger.log('没有新章节需要爬取');
      return;
    }

    await this.crawlChaptersAsync(novelId, newChapters);
  }
}
