/*
 * @Version: 0.0.1
 * @Author: ider
 * @Date: 2020-05-07 21:25:11
 * @LastEditors: ider
 * @LastEditTime: 2020-05-28 15:34:47
 * @Description: 
 */

// Dependencies: cheerio for HTML parsing, Apify SDK for crawling,
// sendMail from the local util module (currently unused here).
const cheerio = require('cheerio')
const Apify = require('apify')
const { sendMail } = require('./util.js')
const { log } = Apify.utils;
// DEBUG level so per-row scrape output and failed-request logs are visible.
log.setLevel(log.LEVELS.DEBUG);


// Scrape the cpu-monkey Cinebench R20 single-core benchmark index page,
// enqueue every CPU's detail page, then persist each CPU's spec table as
// JSON (dataStore) and the raw page HTML (htmlStore).
Apify.main(async () => {
    // Single source of truth for the dataset name — it keys the request
    // queue, the request list, and both key-value stores.
    const STORE_NAME = 'cpu_benchmark-cinebench_r20_single_core-9';
    const BASE_URL = 'https://www.cpu-monkey.com/en/';

    const requestQueue = await Apify.openRequestQueue(STORE_NAME);
    const requestList = await Apify.openRequestList(STORE_NAME, [
        `${BASE_URL}${STORE_NAME}`,
    ]);

    await Apify.utils.sleep(1500);
    const dataStore = await Apify.openKeyValueStore(STORE_NAME);
    const htmlStore = await Apify.openKeyValueStore(`${STORE_NAME}_html`);

    // --- Options shared by both crawlers (previously duplicated) ---

    const launchPuppeteerOptions = {
        slowMo: 100,         // slow down Puppeteer operations to simplify debugging
        handleSIGINT: true,  // so Chrome doesn't exit when we quit Node
        headless: false,     // visible browser to see what's happening
        ignoreHTTPSErrors: true,
    };

    // Abort image downloads to save bandwidth, then navigate and wait for
    // the network to go (mostly) idle.
    const gotoFunction = async ({ request, page }) => {
        await Apify.utils.puppeteer.addInterceptRequestHandler(page, (req) => {
            if (req.resourceType() === 'image') {
                req.abort();
            } else {
                req.continue();
            }
        });
        return Apify.utils.puppeteer.gotoExtended(page, request, { waitUntil: 'networkidle2', timeout: 60000 });
    };

    const handleFailedRequestFunction = async ({ request, error }) => {
        log.debug(`Request ${request.url} failed twice.${error}`);
    };

    // --- Pass 1: crawl the benchmark index and enqueue each CPU page ---
    const crawlerIndex = new Apify.PuppeteerCrawler({
        requestList,
        launchPuppeteerOptions,
        maxRequestRetries: 1000,
        handlePageTimeoutSecs: 60,
        gotoTimeoutSecs: 60,
        maxConcurrency: 1,
        useSessionPool: true,
        gotoFunction,

        handlePageFunction: async ({ request, response, page }) => {
            const html = await response.text();
            const $ = cheerio.load(html);

            // cheerio's .each() is synchronous, so addRequest() promises
            // cannot be awaited inside it (they were previously left
            // floating). Collect the rows first, then await the enqueues.
            const rows = [];
            $('table.data tr').each((index, item) => {
                const data = {
                    href: $(item).find('td:nth-child(1) > a').attr('href'),
                    name: $(item).find('td:nth-child(2) > a').text().trim(),
                    perf: $(item).find('td:nth-child(2) > span').text().trim(),
                    score: $(item).find('td:nth-child(3)').text().trim(),
                };
                // Rows without an anchor (e.g. header rows) yield no href.
                if (data.href) rows.push(data);
            });

            for (const data of rows) {
                log.info(JSON.stringify(data));
                await requestQueue.addRequest({ url: `${BASE_URL}${data.href}`, userData: data });
            }
        },

        handleFailedRequestFunction,
    });

    await crawlerIndex.run();

    // --- Pass 2: crawl each CPU detail page and persist its spec table ---
    const crawler = new Apify.PuppeteerCrawler({
        requestQueue,
        launchPuppeteerOptions,
        maxRequestRetries: 1000,
        handlePageTimeoutSecs: 60,
        gotoTimeoutSecs: 60,
        maxConcurrency: 3,
        useSessionPool: true,
        gotoFunction,

        handlePageFunction: async ({ request, response, page }) => {
            const html = await response.text();
            const $ = cheerio.load(html);
            const scraped = {};

            // The spec table lays out label/value pairs either as one pair
            // (2 cells) or two pairs (4 cells) per row.
            $('table.cpu tr').each((index, item) => {
                const tds = $(item).find('td'); // was an implicit global before
                const cellText = (n) => $(item).find(`td:nth-child(${n})`).text();
                const label = (n) => cellText(n).replace(':', '').trim();

                if (tds.length === 2) {
                    scraped[label(1)] = cellText(2).trim();
                }
                if (tds.length === 4) {
                    scraped[label(1)] = cellText(2).trim();
                    scraped[label(3)] = cellText(4).trim();
                }
            });

            // Merge into a NEW object — the original Object.assign mutated
            // request.userData, which the crawler owns.
            const record = { ...request.userData, ...scraped };
            log.info(JSON.stringify(record));

            const key = request.url.split('/').pop();
            await dataStore.setValue(key, record);
            await htmlStore.setValue(key, html, { contentType: 'text/html' });
        },

        handleFailedRequestFunction,
    });

    await crawler.run();
});