import asyncio
from crawl4ai import AsyncWebCrawler, CacheMode, BrowserConfig, CrawlerRunConfig

# CSS Selector Example
async def simple_example_with_css_selector():
    """Demonstrate the effect of ``CrawlerRunConfig.css_selector``.

    Crawls the same page twice — once restricted to a CSS selector and
    once unrestricted — and prints the length of the extracted markdown
    for each run so the difference in scope is visible.
    """
    print("\n--- Using CSS Selectors ---")
    browser_config = BrowserConfig(headless=True)
    # BYPASS forces a fresh fetch so a cached result from one run cannot
    # mask the comparison with the other.
    crawler_config = CrawlerRunConfig(
        cache_mode=CacheMode.BYPASS,
        css_selector=".wide-tease-item__description",  # limit the crawl scope
    )
    url = "https://www.nbcnews.com/business"

    # Reuse a single browser session for both runs instead of launching
    # a second headless browser for the unrestricted crawl.
    async with AsyncWebCrawler(config=browser_config) as crawler:
        # Run 1: extraction restricted to the CSS selector.
        result = await crawler.arun(url=url, config=crawler_config)
        print(f'限制范围后爬取长度: {len(result.markdown)}')

        # Run 2: same page with the selector cleared — full-page extraction.
        result = await crawler.arun(
            url=url, config=crawler_config.clone(css_selector=None)
        )
        print(f'不限制范围爬取长度: {len(result.markdown)}')

def _main() -> None:
    """Script entry point: run the CSS-selector demo to completion."""
    asyncio.run(simple_example_with_css_selector())


if __name__ == "__main__":
    _main()
