/*
 * @Version: 0.0.1
 * @Author: ider
 * @Date: 2021-08-05 14:09:03
 * @LastEditors: ider
 * @LastEditTime: 2021-08-05 14:26:10
 * @Description: debug helper — find pages missed by the previous run and re-add them to the request queue
 */

// NOTE(review): cheerio is required but never referenced in this file — confirm before removing.
const cheerio = require('cheerio')
const Apify = require('apify')
const { log } = Apify.utils;
// Verbose logging for this one-off maintenance/debug run.
log.setLevel(log.LEVELS.DEBUG);


Apify.main(async () => {
    // Re-enqueue CPU detail pages that were indexed but never scraped:
    // build the set of all indexed CPU names, subtract the names already
    // present in the detail store, and push the missing URLs back onto
    // the 'cpu-detail' request queue.

    const cpuIndexStore = await Apify.openKeyValueStore('cpu_index');
    const requestQueueCpuDetail = await Apify.openRequestQueue('cpu-detail');
    // NOTE(review): despite the original "GPU" variable name, this store
    // holds CPU detail records ('re_cpu_detail').
    const cpuDetailStore = await Apify.openKeyValueStore('re_cpu_detail');

    // Strip vendor prefixes so index records and detail records key on the
    // same normalized name.
    const normalizeName = (raw) => raw.replace('Intel ', '').replace('AMD ', '');

    const cpuRecordByName = {};   // normalized name -> index record (carries .Url)
    const indexedNames = [];      // every name found in the index store
    const finishedNames = new Set(); // names that already have a detail record

    await cpuIndexStore.forEachKey(async (key) => {
        const data = await cpuIndexStore.getValue(key);
        const name = normalizeName(data.Name);
        indexedNames.push(name);
        cpuRecordByName[name] = data;
    });

    await cpuDetailStore.forEachKey(async (key) => {
        console.log(key);
        // Detail records use lowercase `name`; index records use `Name`.
        const data = await cpuDetailStore.getValue(key);
        finishedNames.add(normalizeName(data.name));
    });

    // NOTE(review): both forEachKey calls above are awaited, so this pause is
    // presumably a grace period for store consistency — confirm whether it is
    // still needed before removing.
    await Apify.utils.sleep(10 * 1000);

    for (const name of indexedNames) {
        if (!finishedNames.has(name)) {
            // Bug fix: addRequest returns a Promise. The original code did not
            // await it, so Apify.main could resolve (and the actor exit) before
            // the missing requests were persisted to the queue.
            await requestQueueCpuDetail.addRequest({
                url: `https://www.techpowerup.com${cpuRecordByName[name].Url}`,
                userData: cpuRecordByName[name],
            });
        }
    }
    console.log('Crawler finished.');
});