import asyncio
from dotenv import load_dotenv
from crawl4ai import AsyncWebCrawler, BrowserConfig, CrawlerRunConfig, CacheMode

# Load environment variables from a local .env file (if present) so that
# any configuration/credentials are available before the crawler starts.
load_dotenv()

async def main():
    """Crawl a single page with crawl4ai and print a Markdown preview.

    Launches a headless browser, fetches the target URL with the local
    cache bypassed, then prints the first 1000 characters of the
    extracted Markdown and the HTTP response status code.
    """
    # Browser configuration: run the browser without a visible window.
    browser_config = BrowserConfig(
        headless=True
    )

    # Crawler configuration: always fetch a fresh copy, skip the cache.
    crawler_config = CrawlerRunConfig(
        cache_mode=CacheMode.BYPASS
    )

    # The async context manager handles browser startup and teardown.
    async with AsyncWebCrawler(config=browser_config) as crawler:
        result = await crawler.arun(
            url="https://www.g2.com/products/bright-data/reviews",
            config=crawler_config
        )

        # Print the first 1000 characters of the Markdown conversion.
        # Bug fix: the original f-string read "data:n" — the newline
        # escape was missing its backslash, printing a literal "n".
        print(f"Parsed Markdown data:\n{result.markdown[:1000]}")
        print(f"Response status code: {result.status_code}")

if __name__ == "__main__":
    asyncio.run(main())