/*
 * @Version: 0.0.1
 * @Author: ider
 * @Date: 2021-08-04 14:09:58
 * @LastEditors: ider
 * @LastEditTime: 2021-08-05 14:56:11
 * @Description: get all cpu links
 */

const Apify = require('apify')
const cheerio = require('cheerio')
const nanoid = require('nanoid')
const crypto = require('crypto')
const { assert } = require('console')
const {log} = Apify.utils

// Emit informational-and-above log output from the Apify SDK.
log.setLevel(log.LEVELS.INFO)

// Return the hex MD5 digest of `name`; used as a stable key-value-store key
// for a page's URL (see handlePageFunction below).
const getHash = (name) => {
    const md5 = crypto.createHash('md5')
    md5.update(name)
    return md5.digest('hex')
}

Apify.main(async () => {
    // SECURITY NOTE(review): the proxy URL embeds credentials in source
    // control — move it to an environment variable instead of hardcoding.
    const proxyConfiguration = await Apify.createProxyConfiguration({
        proxyUrls: ['http://ider:123456@www.iwuzhen.top:12340'],
    });

    // Raw HTML of each CPU detail page, keyed by md5 of the request's URL.
    const cpuDetailHtmlStore = await Apify.openKeyValueStore('cpu_detail_html')
    const requestQueueCpuDetail = await Apify.openRequestQueue('cpu-detail')

    const rootCrawler = new Apify.PlaywrightCrawler({
        requestQueue: requestQueueCpuDetail,
        maxConcurrency: 1,        // crawl politely: one page at a time
        maxRequestRetries: 1000,  // keep retrying until the page is captured
        proxyConfiguration,
        useSessionPool: false,
        navigationTimeoutSecs: 360,
        persistCookiesPerSession: false,
        // Saves the rendered HTML of a CPU detail page into the KV store.
        // Throws (an Error, not a string) to trigger the crawler's retry
        // machinery when the page is rate-limited or not a CPU page.
        handlePageFunction: async ({
            page,
            request
        }) => {
            log.info('start')
            const html = await page.content()
            const $ = cheerio.load(html)

            // Back off hard when the site rate-limits us, then rethrow so
            // the request is retried later.
            if ($("h1").text() === "HTTP 429 - Too Many Requests") {
                log.info("HTTP 429 - Too Many Requests, wait 180s")
                await Apify.utils.sleep(180 * 1000);
                throw new Error("HTTP 429 - Too Many Requests, retrying")
            }

            // Sanity check: real CPU detail pages carry a "CPU Database"
            // breadcrumb as the first list item.
            const flag = $('#page > article > ul > li:nth-child(1) > a > span').text().trim()
            if (flag !== "CPU Database") {
                throw new Error("not cpu page")
            }

            await cpuDetailHtmlStore.setValue(getHash(request.userData.Url), html, { contentType: 'text/html' })
            await Apify.utils.sleep(5 * 1000);  // throttle between pages
        },
        // Called once a request has failed too many times; record the
        // failure in the default dataset after a long cool-down.
        handleFailedRequestFunction: async ({
            request
        }) => {
            await Apify.utils.sleep(120 * 1000);
            await Apify.pushData({
                url: request.url,
                succeeded: false,
                errors: request.errorMessages,
            })
        },
    })

    await rootCrawler.run()
})