

var Crawler = require('crawler');
var articles = require('./models/articles'); // article model (defines/creates the backing table)

// Shared Crawler instance: each queued page is parsed for the
// `#content .article table` book-list markup and every entry is
// persisted through the `articles` model.
var c = new Crawler({
    // NOTE(review): the original comment claimed maxConnections is forced
    // to 1 — that only happens when `rateLimit` is set, which it is not
    // here, so 10 concurrent connections are actually used. Confirm the
    // intended politeness policy.
    maxConnections: 10,
    // Invoked once per crawled page.
    callback: function (error, res, done) {
        if (error) {
            console.log(error);
            done();
            return;
        }

        // Server-side cheerio handle supplied by crawler.
        var $ = res.$;
        var list = $('#content .article table').toArray();

        // Strip whitespace/newlines from scraped text.
        var clean = function (text) {
            return text.trim().replace(/[ \r\n]/g, '');
        };

        // Kick off one insert per entry and keep the promises so we can
        // defer `done()` until all writes have settled — the original
        // called done() immediately, letting the crawler finish while
        // DB inserts were still pending (and any rejection was unhandled).
        var pending = list.map(function (element) {
            var title = clean($(element).find(' .pl2 a').text());
            console.log(title);

            var author = clean($(element).find(' .pl').text());
            console.log(author);

            var introduction = clean($(element).find(' span.inq').text());
            console.log(introduction);

            // `introdtion` (sic) is the persisted column name — keep the
            // key as-is so existing rows/schema remain compatible.
            return articles
                .create({ title: title, author: author, introdtion: introduction })
                .then(function (row) {
                    console.log('添加的ID：' + row.id);
                });
        });

        Promise.all(pending)
            .catch(function (err) {
                // Surface insert failures instead of leaving an unhandled rejection.
                console.log(err);
            })
            .then(function () {
                done();
            });
    }
});

// Queue a single URL, handled by the default callback above.

module.exports=(url)=>{
    c.queue(url)
}
