import asyncio
import random
from playwright.async_api import async_playwright

async def scrape_table():
    """Scrape the Sensor Tower top-charts table and print every cell.

    Opens a visible Chromium window with a saved login session
    (``storage.json``), loads the US iPhone top-charts page, and for each
    ``<td>`` collects its text, icon ``src``, and link ``href``.

    Performs browser/network I/O only; returns ``None``.
    """
    # Single source of truth for the table-body selector (was duplicated).
    # NOTE(review): the css-* class names look build-generated and may
    # change between site deployments — confirm they are stable.
    tbody_selector = (
        "#mainContent > div.MuiBox-root.css-i9gxme > div > "
        "div.infinite-scroll-component__outerdiv > div > "
        "div.MuiTableContainer-root.css-1p6ntod > table > tbody"
    )
    async with async_playwright() as p:
        # headless=False: a human may need to handle login/captcha prompts.
        browser = await p.chromium.launch(headless=False)
        try:
            context = await browser.new_context(storage_state="storage.json")
            page = await context.new_page()
            await page.goto(
                "https://app.sensortower-china.com/top-charts?country=US&category=0&date=2025-09-01&device=iphone&os=ios"
            )

            # wait_for_selector already returns the element handle, so there
            # is no need for a second query_selector on the same selector.
            tbody = await page.wait_for_selector(tbody_selector)

            rows = await tbody.query_selector_all("tr")
            table_data = []
            for idx, row in enumerate(rows, start=1):
                tds = await row.query_selector_all("td")
                for td in tds:
                    raw_text = await td.inner_text()
                    icon_el = await td.query_selector("img")
                    icon_url = await icon_el.get_attribute("src") if icon_el else ""
                    link_el = await td.query_selector("a")
                    link_href = await link_el.get_attribute("href") if link_el else ""
                    table_data.append({
                        "raw_text": raw_text.strip().replace("\n", " "),
                        "icon_url": icon_url,
                        "link_href": link_href,
                    })
                # FIX: this progress message was outside the loop, printing
                # only once and raising NameError when the table was empty.
                print(f"第 {idx} 条完成")

            # Short randomized pause to look less like an automated client.
            delay_s = random.uniform(1, 3)
            print(f"主循环暂停 {delay_s:.2f} 秒...")
            await asyncio.sleep(delay_s)

            print("\n完整数据:")
            for item in table_data:
                print(item)
        finally:
            # Release the browser even if navigation or scraping fails.
            await browser.close()

if __name__ == "__main__":
    # Guard the entry point so importing this module does not launch a
    # browser as a side effect.
    asyncio.run(scrape_table())
