from django.core.management.base import BaseCommand
from news.crawlers.gxbh_crawler import GXBHCrawler


class Command(BaseCommand):
    """Management command that crawls news from the Guangxi Beihai news
    site via :class:`GXBHCrawler`, one listing page at a time.

    Usage::

        manage.py <command> [--startPage N] [--pageCount N] [--category NAME]
    """

    help = "爬取广西北海新闻网的新闻"

    def add_arguments(self, parser):
        """Register CLI options: first page, number of pages, and category."""
        parser.add_argument("--startPage", type=int, help="开始的页数", default=1)
        parser.add_argument("--pageCount", type=int, help="要获取的页数", default=1)
        parser.add_argument("--category", type=str, help="新闻分类", default=None)

    def handle(self, *args, **options):
        """Crawl ``pageCount`` consecutive pages starting at ``startPage``.

        Pages are fetched sequentially; the number of articles saved for
        each page is reported as the page completes.
        """
        pageCount: int = options["pageCount"]
        # --category defaults to None (meaning "all categories"), so the
        # local must be Optional — a plain ``str`` annotation was wrong.
        category: str | None = options["category"]
        startPage: int = options["startPage"]

        # NOTE(review): the "数量限制" figure assumes 10 articles per
        # listing page — confirm against GXBHCrawler's page size.
        self.stdout.write(
            self.style.SUCCESS(
                f"开始爬取新闻，分类: {category or '全部'}，数量限制: {pageCount * 10}"
            )
        )

        crawler = GXBHCrawler()

        for i in range(pageCount):
            self.stdout.write(
                self.style.SUCCESS(f"开始爬取第 {startPage + i} 页，共 {pageCount} 页")
            )
            count: int = crawler.crawl_and_save(page=startPage + i, category=category)
            self.stdout.write(self.style.SUCCESS(f"爬取完成，成功保存 {count} 条新闻"))
