/*
 * @Version: 0.0.1
 * @Author: ider
 * @Date: 2020-05-07 21:25:11
 * @LastEditors: ider
 * @LastEditTime: 2020-05-10 00:22:15
 * @Description: Apify Puppeteer crawler that fetches GPU detail pages, parses spec sections with cheerio, and saves parsed data plus raw HTML to key-value stores.
 */

// Dependencies: cheerio for static HTML parsing, Apify for crawling
// (request queue, key-value stores, Puppeteer), sendMail for operator alerts.
const cheerio = require('cheerio')
const Apify = require('apify')
const { sendMail } = require('./util.js')
const { log } = Apify.utils;
log.setLevel(log.LEVELS.DEBUG);
 // URLs to crawl come from the previously populated 'gpu_page' request queue (opened in main below)

 
Apify.main(async () => {
    // Named queue/stores so an interrupted run can resume with the same state.
    const requestQueuePage = await Apify.openRequestQueue('gpu_page')
    const dataStore = await Apify.openKeyValueStore('gpu_detail');      // parsed spec objects
    const htmlStore = await Apify.openKeyValueStore('gpu_detail_html'); // raw HTML snapshots

    // Section headings worth extracting from a GPU detail page.
    const WANTED_SECTIONS = new Set([
        'Integrated Graphics', 'Mobile Graphics', 'Graphics Processor',
        'Graphics Card', 'Clock Speeds', 'Memory', 'Board Design',
        'Render Config', 'Theoretical Performance', 'Graphics Features',
    ]);

    /**
     * Parse a detail page into `{ sectionTitle: { label: value } }`.
     * Only sections whose <h2> heading is in WANTED_SECTIONS are kept;
     * each <dl class="clearfix"> row becomes one label/value pair, with
     * any ':' characters stripped from the label.
     * @param {string} html - full page HTML
     * @returns {Promise<Object<string, Object<string, string>>>}
     */
    const extract = async (html) => {
        const retDict = {}
        const $ = cheerio.load(html)
        $('.sectioncontainer  section.details').each((index, el) => {
            const topKey = $(el).find('h2').text().trim()
            if (!WANTED_SECTIONS.has(topKey)) return // skip uninteresting sections
            retDict[topKey] = {}
            $(el).find('dl.clearfix').each((ij, elj) => {
                const key = $(elj).find('dt').text().replace(/:/g, '').trim()
                retDict[topKey][key] = $(elj).find('dd').text().trim()
            })
        });
        return retDict
    }

    // Single-concurrency crawler: slowMo, one page at a time and a local
    // proxy keep the request rate low; generous retries/timeouts because
    // the run is expected to survive transient blocks.
    const crawler = new Apify.PuppeteerCrawler({
        requestQueue: requestQueuePage,
        launchPuppeteerOptions: {
            slowMo: 100,            // slow down Puppeteer ops to simplify debugging
            proxyUrl: 'http://127.0.0.1:31188',
            handleSIGINT: true,     // so Chrome doesn't exit when we quit Node
            headless: false,        // to see what's happening
            ignoreHTTPSErrors: true,
        },
        maxRequestRetries: 1000,
        handlePageTimeoutSecs: 180,
        gotoTimeoutSecs: 180,
        maxConcurrency: 1,
        useSessionPool: true,

        // Abort heavy sub-resources before navigating: only the HTML document
        // is needed because parsing is done with cheerio on the raw response.
        gotoFunction: async ({ request, page }) => {
            await Apify.utils.puppeteer.addInterceptRequestHandler(page, (req) => {
                // `req` is the intercepted sub-resource request — distinct from
                // the crawler-level `request` being navigated.
                if (['image', 'stylesheet', 'font', 'script'].includes(req.resourceType())) {
                    req.abort()
                } else {
                    req.continue()
                }
            })
            return Apify.utils.puppeteer.gotoExtended(page, request, { waitUntil: 'networkidle2', timeout: 60000 })
        },

        // Extract one page: confirm it is a real page, parse the spec
        // sections and persist both the parsed data and the raw HTML.
        handlePageFunction: async ({ request, response, page }) => {
            const html = await response.text()
            if (!html.includes('GPU Database')) {
                // The marker text is missing on block/captcha pages: alert the
                // operator and park the crawler (~2.8 h) so the proxy IP can
                // be rotated before retries resume.
                log.warning('没有正常出现页面')
                await sendMail('no suject key', '需要更换ip')
                await Apify.utils.sleep(9999999);
            }
            const retDict = await extract(html)
            // Key records by the last non-empty URL path segment, so a
            // trailing '/' cannot produce an empty (invalid) store key.
            const key = request.url.split('/').filter(Boolean).pop()
            await dataStore.setValue(key, retDict);
            await htmlStore.setValue(key, html, { contentType: 'text/html' })
        },

        handleFailedRequestFunction: async ({ request, error }) => {
            // Reached only after maxRequestRetries attempts have all failed.
            log.debug(`Request ${request.url} failed after all retries. ${error}`);
        },
    });

    await crawler.run();

});