/*
 * @Version: 0.0.1
 * @Author: ider
 * @Date: 2021-07-21 13:51:44
 * @LastEditors: ider
 * @LastEditTime: 2021-08-04 16:02:53
 * @Description: 
 */

const Apify = require('apify')
const cheerio = require('cheerio')
const nanoid = require('nanoid')
const {
    log
} = Apify.utils
log.setLevel(log.LEVELS.INFO)

/**
 * Three-stage crawl of cpu-monkey.com:
 *   1. rootCrawler      – scrapes the CPU-family index links from the landing page
 *                         into the 'cpu-index' queue.
 *   2. indexCrawler     – on each index page, repeatedly clicks "#load_cpus" to
 *                         expand the table, then enqueues every CPU detail link
 *                         into the 'cpu-detail' queue.
 *   3. cpuDetailCrawler – parses each CPU detail page (spec tables + benchmark
 *                         bars) and stores the result in the 'cpuDetail'
 *                         key-value store under a random nanoid key.
 * Failed requests (after retries) are pushed to the default dataset.
 */
Apify.main(async () => {
    const requestQueueCpuIndex = await Apify.openRequestQueue('cpu-index')
    const requestQueueCpuDetail = await Apify.openRequestQueue('cpu-detail')
    const rootRequestList = await Apify.openRequestList('root-url', [{
        url: 'https://www.cpu-monkey.com/en/cpus',
    }])

    const rootCrawler = new Apify.PlaywrightCrawler({
        requestList: rootRequestList,
        maxConcurrency: 4,
        maxRequestRetries: 1000,
        handlePageFunction: async ({ page }) => {
            log.info('into')
            const html = await page.content()
            // log.info(html)
            const $ = cheerio.load(html)
            // cheerio's .each() is synchronous and ignores async callbacks, so
            // iterate with for...of and await each enqueue to avoid floating
            // addRequest() promises.
            for (const el of $('.full_col.frame ul li a').toArray()) {
                const href = $(el).attr('href')
                const name = $(el).text()
                if (!href) {
                    continue
                }
                log.info(`new index queue:${name}: ${href}`)
                await requestQueueCpuIndex.addRequest({
                    url: `https://www.cpu-monkey.com/en/${href}`,
                    userData: {
                        Name: name,
                    },
                })
            }
        },
        handleFailedRequestFunction: async ({ request }) => {
            // This function is called when the crawling of a request failed too many times
            await Apify.pushData({
                url: request.url,
                succeeded: false,
                errors: request.errorMessages,
            })
        },
    })

    await rootCrawler.run()

    const indexCrawler = new Apify.PlaywrightCrawler({
        requestQueue: requestQueueCpuIndex,
        maxConcurrency: 4,
        maxRequestRetries: 1000,
        handlePageFunction: async ({ page }) => {
            // Keep clicking the "load more" button until it disappears; the
            // waitForSelector timeout throwing is the expected exit condition.
            while (true) {
                try {
                    await page.waitForSelector('#load_cpus', {
                        state: 'attached',
                        timeout: 10000,
                    })
                    await page.click('#load_cpus')
                } catch (error) {
                    break
                }
            }
            const html = await page.content()
            const $ = cheerio.load(html)
            // Same pattern as rootCrawler: await every enqueue instead of
            // leaking promises from an async .each() callback.
            for (const el of $('.data.vergleich tr > td:nth-child(1) a').toArray()) {
                const href = $(el).attr('href')
                const name = $(el).text()
                if (!href) {
                    continue
                }
                log.info(`new index queue:${name}: ${href}`)

                await requestQueueCpuDetail.addRequest({
                    url: `https://www.cpu-monkey.com/en/${href}`,
                    userData: {
                        Name: name,
                    },
                })
            }
        },
        handleFailedRequestFunction: async ({ request }) => {
            // This function is called when the crawling of a request failed too many times
            await Apify.pushData({
                url: request.url,
                succeeded: false,
                errors: request.errorMessages,
            })
        },
    })

    await indexCrawler.run()

    // Single known-good detail page; swap it in for requestQueueCpuDetail
    // below when debugging the detail parser in isolation.
    const testRequestList = await Apify.openRequestList('test-url', [
        {
            url: 'https://www.cpu-monkey.com/en/cpu-intel_core_i7_11375h-1892',
            userData: {
                Name: 'Intel Core i7-11375H',
            },
        },
    ])

    const dataStore = await Apify.openKeyValueStore('cpuDetail')

    const cpuDetailCrawler = new Apify.PlaywrightCrawler({
        requestQueue: requestQueueCpuDetail,
        // requestList: testRequestList,
        maxConcurrency: 4,
        maxRequestRetries: 1000,
        handlePageFunction: async ({ page, request }) => {
            const html = await page.content()
            const $ = cheerio.load(html)
            // Rows are grouped under <h2> section headers; topTitle tracks the
            // current section. Pre-initialize the 'root' bucket so data rows
            // that appear before any header don't throw on an undefined object.
            let topTitle = 'root'
            const retData = { [topTitle]: {} }
            const trs = $('.cpu_col2 .start.data').find('tr')
            for (const tr of trs.toArray()) {
                const td = $(tr).find('td')
                if (td.length === 1) {
                    // Single-cell row: a section header (<h2>) starting a new group.
                    const title = $(td).find('h2').text().trim()
                    if (title.length > 0) {
                        log.info(
                            `new index queue:${td.length}, title: ${title}`
                        )
                        topTitle = title
                        retData[topTitle] = {}
                    }
                }
                if (td.length === 2) {
                    // Two-cell row: one "key: value" spec pair.
                    const key = $(td[0]).text().replace(':', '').trim()
                    const value = $(td[1]).text().trim()
                    if (key.length > 0) {
                        retData[topTitle][key] = value
                    }
                }
                if (td.length === 4) {
                    // Four-cell row: two "key: value" pairs side by side.
                    let key = $(td[0]).text().replace(':', '').trim()
                    let value = $(td[1]).text().trim()
                    if (key.length > 0) {
                        retData[topTitle][key] = value
                    }
                    key = $(td[2]).text().replace(':', '').trim()
                    value = $(td[3]).text().trim()
                    if (key.length > 0) {
                        retData[topTitle][key] = value
                    }
                }
            }
            // Benchmark bars: element id (e.g. "bench_...") -> active bar text.
            const score = {}
            $("[id^='bench_']").each((index, el) => {
                const ss = $(el).find('.benchmarkbar_active')
                const key = $(el).attr('id')
                score[key] = ss.text()
            })
            retData['benchmark'] = score
            retData['name'] = request.userData['Name']
            const nnid = nanoid.nanoid()
            // log.info's second argument must be an object, not a bare string.
            log.info('nnid', { nnid })
            await dataStore.setValue(nnid, retData)
        },
        handleFailedRequestFunction: async ({ request }) => {
            // This function is called when the crawling of a request failed too many times
            await Apify.pushData({
                url: request.url,
                succeeded: false,
                errors: request.errorMessages,
            })
        },
    })

    await cpuDetailCrawler.run()
})