const Crawler = require('crawler');
const siteLogic = require('./business-logic/site');
const crawlerUtil = require('./crawler/util');
const global = require('./crawler/global');
const keywordLogic = require('./business-logic/keyword');
const statusLogic = require('./business-logic/status');
const email = require('./email/email');
require('./crawler/guard')

// Single shared crawler instance for the whole process.
// maxConnections: 1 serializes requests so target sites are
// fetched one at a time (polite, low-load crawling).
const crawler = new Crawler({
    maxConnections: 1
});

/**
 * Runs one full crawl pass: resets email state, marks the crawler as
 * running, loads all sites and keywords, then queues one crawl job per
 * (keyword, site) pair.
 *
 * Note: this resolves once all jobs are QUEUED, not when crawling
 * finishes — completion is signalled by the crawler's 'drain' event.
 *
 * @returns {Promise<void>}
 */
async function start() {
    email.reset();
    email.offSend();
    console.log('爬虫开始......');
    // State 1 = "crawler running" — presumably; confirm against statusLogic.
    await statusLogic.updateCrawlerState(1);
    // Sites and keywords are independent queries — fetch them in parallel
    // instead of awaiting them one after the other.
    const [sites, keywords] = await Promise.all([
        siteLogic.queryAll(),
        keywordLogic.queryAll()
    ]);
    global.setSites(sites);
    global.setKeyword(keywords);
    console.log('获取数据完毕......');
    // Queue the full cartesian product of keywords × sites.
    for (const keyword of keywords) {
        for (const site of sites) {
            const options = crawlerUtil.getCrawlerOptions(site, keyword);
            crawler.queue(options);
        }
    }
}

// Entry point: wire up the completion handler, then kick off the first
// crawl pass. Subsequent passes are scheduled from the 'drain' handler.
(async function() {
    // Register the drain handler BEFORE queueing any jobs so a very fast
    // first pass cannot empty the queue before the listener exists.
    crawler.on('drain', function() {
        console.log('爬取完毕，准备发送邮件......');
        email.listenSend();
        // State 0 = idle — presumably; then read the status row back to
        // learn how long to wait before the next pass.
        statusLogic.updateCrawlerState(0)
            .then(() => statusLogic.query())
            .then(status => {
                console.log(`发送邮件完毕，${status.interval}分钟后将会再次启动......\n`);
                // status.interval is in minutes; convert to milliseconds.
                setTimeout(start, status.interval * 60 * 1000);
            })
            .catch(err => {
                // Without this catch, a DB failure here would be an
                // unhandled rejection and the crawler would never restart.
                console.error('drain handler failed:', err);
            });
    });
    try {
        await start();
    } catch (err) {
        // Surface startup failures instead of dying on an unhandled
        // rejection; best-effort reset of the "running" flag.
        console.error('crawler start failed:', err);
        await statusLogic.updateCrawlerState(0).catch(() => {});
    }
})();