// crawler.ts
import { firefox, type Page } from 'playwright';
import { createStorage, type Storage } from 'unstorage';
import fsDriver from 'unstorage/drivers/fs';

// Create a storage instance that saves files to the "mirror" directory
// Filesystem-backed storage: every item is persisted as a file under ./mirror.
const mirrorDriver = fsDriver({ base: './mirror' });
const storage: Storage<string> = createStorage({ driver: mirrorDriver });

/**
 * Generate an array of (contestId, problemId) objects within the specified start and end range.
 * @param startPid - Starting problemId (inclusive)
 * @param endPid - Ending problemId (inclusive)
 * @returns Array<{ pid: number }>
 */
/**
 * Generate one crawl task per problemId in the inclusive range
 * [startPid, endPid], in ascending order.
 * @param startPid - Starting problemId (inclusive)
 * @param endPid - Ending problemId (inclusive)
 * @returns Array of `{ pid }` task objects.
 * @throws Error when startPid is greater than endPid.
 */
const generateTasks = (startPid: number, endPid: number): Array<{ pid: number }> => {
    if (startPid > endPid) {
        throw new Error('startPid 不能大于 endPid');
    }
    const tasks: Array<{ pid: number }> = [];
    for (let pid = startPid; pid <= endPid; pid += 1) {
        tasks.push({ pid });
    }
    return tasks;
};

/**
 * Build the URL for a given contest/problem pair.
 */
const buildUrl = (pid: number): string => `https://vjudge.net/problem/%E6%B4%9B%E8%B0%B7-P${pid}`;

/**
 * Build the storage key for saving HTML.
 */
const buildStorageKey = (pid: number): string => `problem/P${pid}.html`;

/**
 * Crawl a single page and store its HTML.
 */
/**
 * Fetch one problem page and persist its full HTML to storage.
 * Errors are logged and swallowed so a single failed page does not
 * abort the overall crawl.
 */
const crawlPage = async (page: Page, pid: number): Promise<void> => {
    const url = buildUrl(pid);
    console.log(`Crawling: ${url}`);

    try {
        // Wait for network idle so dynamically-loaded content is present.
        await page.goto(url, { waitUntil: 'networkidle' });

        // Snapshot the rendered DOM and write it under the problem's key.
        const html = await page.content();
        await storage.setItem(buildStorageKey(pid), html);
        console.log(`Saved: ${buildStorageKey(pid)}`);
    } catch (err) {
        // Narrow unknown errors: log the message for real Errors,
        // the raw value otherwise.
        const detail = err instanceof Error ? err.message : err;
        console.error(`Failed: ${url}`, detail);
    }
};

/**
 * Main crawl function (functional style).
 */
/**
 * Main crawl entry point: launches headless Firefox, installs the session
 * cookie, then sequentially crawls problems P1000..P9999.
 *
 * Fixes vs. the previous version:
 *  - `addCookies` returns a Promise and was not awaited, so the first
 *    navigation could race ahead of cookie installation.
 *  - The cookie name was "JSESSIONlD" (lowercase 'l'); the standard servlet
 *    session cookie is "JSESSIONID", so authentication silently failed.
 *  - The cookie domain was "www.vjudge.net", but pages are fetched from
 *    "vjudge.net" (see buildUrl), so the cookie was never sent; ".vjudge.net"
 *    matches the apex domain and all subdomains.
 *  - The browser is now closed in `finally`, so a thrown error no longer
 *    leaks the Firefox process.
 */
export const crawl = async (): Promise<void> => {
    const fx = await firefox.launch();
    try {
        const context = await fx.newContext();

        await context.addCookies([
            {
                name: 'JSESSIONID',
                // NOTE(review): hardcoded session credential — consider
                // loading this from an environment variable instead.
                value: 'lnv|420G59YS91R1KEN8I3H3G48RQTONIM',
                domain: '.vjudge.net',
                path: '/',
            },
        ]);

        const page = await context.newPage();

        // One task per problemId in [1000, 9999].
        const tasks = generateTasks(1000, 9999);

        // Crawl sequentially to avoid hammering the site with parallel loads.
        for (const { pid } of tasks) {
            await crawlPage(page, pid);
        }
    } finally {
        await fx.close();
    }
};
