/*
 * @Version: 0.0.1
 * @Author: ider
 * @Date: 2020-05-07 21:25:11
 * @LastEditors: ider
 * @LastEditTime: 2020-05-08 12:08:45
 * @Description: 
 */

const cheerio = require('cheerio')
const Apify = require('apify')
const { sendMail } = require('./util.js')
const { log } = Apify.utils;
log.setLevel(log.LEVELS.DEBUG);
 // Prepare a list of URLs to crawl

 
Apify.main(async () => {
    const requestQueueIndex = await Apify.openRequestQueue('gpu_index');
    const requestQueuePage = await Apify.openRequestQueue('gpu_page');

    // Seed the index queue: one listing URL per manufacturer / mobile / workstation combo.
    // NOTE: the original list contained 'Intel' twice; deduplicated here.
    const manufacturers = ['AMD', 'Intel', 'ATI', 'Matrox', 'NVIDIA', 'XGI'];
    for (const Manufacturer of manufacturers) {
        for (const Mobile of ['Yes', 'No']) {
            for (const Workstation of ['Yes', 'No']) {
                // Await each addRequest — the original fired these inside forEach
                // callbacks without awaiting, racing against crawler.run() below.
                await requestQueueIndex.addRequest({
                    url: `https://www.techpowerup.com/gpu-specs/?mfgr=${Manufacturer}&mobile=${Mobile}&workstation=${Workstation}&sort=name`,
                    userData: { Manufacturer, Mobile, Workstation, handle: 2 },
                });
            }
        }
    }
    await Apify.utils.sleep(1500);
    const dataset = await Apify.openDataset('gpu');

    /**
     * Parse one listing page: push every 8-column GPU row to the dataset and
     * enqueue its detail-page URL.
     * @param {string} html - raw HTML of a techpowerup listing page
     * @param {object} userData - request userData (Manufacturer/Mobile/Workstation/handle)
     */
    const extract = async (html, userData) => {
        const $ = cheerio.load(html);
        // cheerio's .each() ignores async callbacks, so the original awaits inside
        // it were floating promises; iterate with for...of so every await is honored.
        for (const el of $('table.processors tr').toArray()) {
            const tds = $(el).find('td');
            if (tds.length !== 8) continue; // skip header / spacer rows
            const retDict = {
                'Product Name': $(tds[0]).text().trim(),
                'GPU Chip': $(tds[1]).text().trim(),
                'Released': $(tds[2]).text().trim(),
                'Bus': $(tds[3]).text().trim(),
                'Memory': $(tds[4]).text().trim(),
                'GPU clock': $(tds[5]).text().trim(),
                'Memory clock': $(tds[6]).text().trim(),
                'Shaders / TMUs / ROPs': $(tds[7]).text().trim(),
                'URL': $(tds[0]).find('a').attr('href'),
            };
            // attr() returns undefined when the cell has no link — guard before
            // startsWith (the original crashed with a TypeError here).
            if (retDict.URL && retDict.URL.startsWith('/')) {
                retDict.URL = 'https://www.techpowerup.com' + retDict.URL;
            }
            // Copy userData minus the internal 'handle' flag instead of mutating
            // the shared request.userData object (original used `delete` in place).
            const { handle, ...passThrough } = userData;
            Object.assign(retDict, passThrough);

            if (retDict.URL) {
                await requestQueuePage.addRequest({ url: retDict.URL, userData: retDict });
            }
            await dataset.pushData(retDict);
        }
    };

    // Crawl the listing URLs with a single slow headless browser (maxConcurrency 1
    // + slowMo keeps the request rate low to avoid being blocked).
    const crawler = new Apify.PuppeteerCrawler({
        requestQueue: requestQueueIndex,
        launchPuppeteerOptions: {
            // slowMo slows down Puppeteer operations to simplify debugging
            slowMo: 100,
            // proxyUrl: 'http://192.168.0.55:1080',
            // proxyUrl: 'http://192.168.0.160:6666',
            handleSIGINT: true, // so Chrome doesn't exit when we quit Node.
            headless: true,
            ignoreHTTPSErrors: true,
            //    executablePath: '/usr/bin/chromium',
            //  args: ['--ignore-certificate-errors','--no-sandbox','--proxy-server=127.0.0.1:44249']
        },
        maxRequestRetries: 1000,
        handlePageTimeoutSecs: 60,
        gotoTimeoutSecs: 60,
        maxConcurrency: 1,
        useSessionPool: true,
        // Abort image requests to save bandwidth; everything else goes through.
        gotoFunction: async ({ request, page }) => {
            await Apify.utils.puppeteer.addInterceptRequestHandler(page, (req) => {
                if (req.resourceType() === 'image') {
                    req.abort();
                } else {
                    req.continue();
                }
            });
            return Apify.utils.puppeteer.gotoExtended(page, request, { waitUntil: 'networkidle2', timeout: 60000 });
        },

        // Called once per loaded listing page: detect blocking, fan out per-year
        // listing URLs (handle === 2), then extract the GPU rows.
        handlePageFunction: async ({ request, response }) => {
            const html = await response.text();
            // A page without the site banner means we are blocked — alert and stall.
            if (!html.includes('GPU Specs Database')) {
                log.warning("没有正常出现页面");
                await sendMail("no suject key", "需要更换ip");
                await Apify.utils.sleep(9999999);
            }
            const $ = cheerio.load(html);
            if (request.userData.handle === 2) {
                // Enqueue one listing URL per release year found in the filter
                // dropdown. for...of (not cheerio .each) so the awaits are honored.
                for (const el of $('#released option').toArray()) {
                    const year = $(el).attr('value');
                    if (!year) continue;
                    const { Manufacturer, Mobile, Workstation } = request.userData;
                    const url = `https://www.techpowerup.com/gpu-specs/?mfgr=${Manufacturer}&mobile=${Mobile}&workstation=${Workstation}&released=${year}&sort=name`;
                    log.info(`new index queue: ${url}`);
                    await requestQueueIndex.addRequest({
                        url,
                        userData: { Manufacturer, Mobile, Workstation, handle: 2 },
                    });
                }
            }
            await extract(html, request.userData);
        },

        handleFailedRequestFunction: async ({ request, error }) => {
            log.debug(`Request ${request.url} failed twice.${error}`);
        },
    });

    await crawler.run();
});