// Crawler configuration for 搜狐新闻 (Sohu News).
//
// The *_format strings below are cheerio expressions kept as STRINGS; the
// generic crawler code evaluates them with eval() against a loaded page
// (where `$` is in scope). Keep them trusted — they are executed verbatim.
const sourceName = "搜狐新闻";                 // label stored with every article
const myEncoding = "gbk";                      // Sohu serves GBK; decoded via iconv-lite
const seedURL = 'https://www.sohu.com/news';   // listing page whose <a> links are harvested

const seedURL_format = "$('a')";                                                    // all anchors on the seed page
const keywords_format = " $('meta[name=\"keywords\"]').eq(0).attr(\"content\")";
const title_format = "$('title').text()";
const date_format = " $('meta[itemprop=\"datePublished\"]').eq(0).attr(\"content\")";
const author_format = "$('#editor_baidu').text()";
const content_format = "$('.article').text()";
const desc_format = " $('meta[name=\"description\"]').eq(0).attr(\"content\")";
const source_format = " $('meta[name=\"mediaid\"]').eq(0).attr(\"content\")";

// Article URLs look like /a/123456789_123456 (9 digits, underscore, 6-9 digits).
const url_reg = /\/a\/(\d{9})_(\d{6,9})/;
// Matches "2020-05-01" / "2020/5/1" / "20.5.1" (\3 forces a consistent
// separator) or the Chinese form "2020年5月1日".
const regExp = /((\d{4}|\d{2})(\-|\/|\.)\d{1,2}\3\d{1,2})|(\d{4}年\d{1,2}月\d{1,2}日)/;
let fs = require('fs');
let myRequest = require('request')
let myCheerio = require('cheerio')
let myIconv = require('iconv-lite')
let utils = require('../utils');
let config = require('../config.json')
require('date-utils');
// Browser-like User-Agent so the site does not block our crawler outright.
const headers = {
    'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/535.1 (KHTML, like Gecko) Chrome/14.0.835.163 Safari/535.1'
};
// Async fetch of `url` via the `request` module — DEAD CODE.
// NOTE(review): this definition is shadowed by the identically-named
// `request` declared immediately below (with two function declarations of
// the same name, the later one wins). This version — the only one that sets
// `port: config.CRAWLERPORT1` — is therefore never executed. Confirm which
// variant is intended and delete the other.
function request(url, callback) {
    let options = {
        url: url,
        encoding: null, // null => body is a raw Buffer, decoded later with iconv-lite
        port: config.CRAWLERPORT1,
        //proxy: 'http://x.x.x.x:xxxx',
        headers: headers,
        timeout: 10000 // ms
    }
    myRequest(options, callback)
}
// Asynchronously fetch `url` with the `request` module and pass the raw
// response to `callback(err, res, body)`. `encoding: null` keeps the body
// as a Buffer so it can be decoded from GBK afterwards.
function request(url, callback) {
    const opts = {
        url,
        encoding: null,
        //proxy: 'http://x.x.x.x:8080',
        headers,
        timeout: 10000, // ms
    };
    myRequest(opts, callback);
}
/**
 * Crawl the Sohu seed page, extract candidate article links, and fetch
 * each not-yet-stored article via newsGet().
 *
 * @param pool mysql connection pool passed through to utils.startTransaction
 * @returns {Promise} resolved the first time an already-crawled URL is seen,
 *   rejected if the seed page cannot be fetched.
 *   NOTE(review): if every link is new the promise never settles (original
 *   behaviour, preserved) — confirm callers tolerate this.
 */
function startSohuGet(pool) {
    return new Promise((resolve, reject) => {
        request(seedURL, function (err, res, body) { // fetch the seed page
            // Guard added: without it, a failed request leaves `body`
            // undefined and iconv.decode() throws outside any handler.
            if (err || !body) {
                reject('读种子页面出错：' + err);
                return;
            }
            // Decode GBK bytes, then parse with cheerio.
            let html = myIconv.decode(body, myEncoding);
            let $ = myCheerio.load(html, { decodeEntities: true });

            let seedurl_news;
            try {
                // seedURL_format is a trusted cheerio expression string,
                // e.g. "$('a')" — evaluated with `$` in scope.
                seedurl_news = eval(seedURL_format);
            } catch (e) { console.log('url列表所处的html块识别出错：' + e) };

            seedurl_news.each(function (i, e) { // every <a> on the seed page
                let myURL = "";
                try {
                    let href = $(e).attr("href");
                    if (typeof (href) == "undefined") { // anchors without href
                        return true; // continue .each()
                    }
                    if (href.toLowerCase().indexOf('http://') >= 0 || href.toLowerCase().indexOf('https://') >= 0) myURL = href; // absolute URL
                    else if (href.startsWith('//')) myURL = 'http:' + href; // protocol-relative
                    else myURL = seedURL.substr(0, seedURL.lastIndexOf('/') + 1) + href; // relative to seed

                } catch (e) { console.log('识别种子页面中的新闻链接出错：' + e) }

                if (!url_reg.test(myURL)) return; // not an article URL — skip

                // Only crawl URLs not already stored in the database.
                let fetch_url_Sql = ['use smartSystem;select count(*) from newsinfo where newsUrl=?'];
                let params = [[myURL]];
                utils.startTransaction(pool, fetch_url_Sql, params).then((rv) => {
                    if (rv[0][1].length > 0) {
                        console.log('URL duplicate!');
                        resolve("爬取完毕!"); // original behaviour: first duplicate ends the crawl
                    } else {
                        newsGet(pool, myURL); // fetch and store the article page
                    }
                }).catch((e) => {
                    // Previously missing: an unhandled rejection here aborts
                    // the process on modern Node versions.
                    console.error('查询URL是否重复出错：' + e);
                });
            });
        });
    });
}


/**
 * Fetch one article page, extract fields via the *_format eval strings,
 * and insert the resulting record into the newsinfo table.
 *
 * @param pool  mysql pool for utils.startTransaction
 * @param myURL absolute article URL (already validated against url_reg)
 */
function newsGet(pool, myURL) {
    request(myURL, function (err, res, body) {
        // Guard added: a failed request leaves `body` undefined and
        // iconv.decode() would throw.
        if (err || !body) {
            console.log('读新闻页面出错：' + myURL + ' ' + err);
            return;
        }
        let html_news = myIconv.decode(body, myEncoding); // GBK -> JS string
        let $ = myCheerio.load(html_news, { decodeEntities: true });
        // (removed `myhtml = html_news` — it created an implicit global and
        // was never read; the fetch.html line that used it is commented out)

        console.log("转码读取成功:" + myURL);

        // Build the record to persist. The *_format strings are evaluated
        // with eval() against `$` — they must remain trusted configuration.
        let fetch = {};
        fetch.title = "";
        fetch.content = "";
        fetch.publish_date = (new Date()).toFormat("YYYY-MM-DD"); // fallback: today
        //fetch.html = myhtml;
        fetch.url = myURL;
        fetch.sourceName = sourceName;
        fetch.sourceEncoding = myEncoding; // page encoding
        fetch.crawltime = new Date();

        if (keywords_format == "") fetch.keywords = sourceName; // no keywords -> source name
        else fetch.keywords = eval(keywords_format);

        if (title_format == "") fetch.title = "";
        else fetch.title = eval(title_format); // page title

        if (date_format != "") fetch.publish_date = eval(date_format); // publish date
        console.log('date: ' + fetch.publish_date);
        console.log(myURL);
        if (fetch.title == '') {
            console.log("此文章不存在");
            return;
        }

        // Normalise the date: take the first recognised pattern, convert the
        // Chinese 年/月/日 form to dashes, then reformat.
        // Guard added: a non-matching date used to crash on exec(...)[0].
        // NOTE(review): new Date() on a non-ISO string relies on engine-
        // specific parsing — confirm it handles "2020-5-1" as intended.
        let dateMatch = regExp.exec(fetch.publish_date);
        if (dateMatch) {
            fetch.publish_date = dateMatch[0]
                .replace('年', '-')
                .replace('月', '-')
                .replace('日', '');
            fetch.publish_date = new Date(fetch.publish_date).toFormat("YYYY-MM-DD");
        } else {
            fetch.publish_date = (new Date()).toFormat("YYYY-MM-DD"); // keep today as fallback
        }

        if (author_format == "") fetch.author = sourceName; // no author selector -> source name
        else fetch.author = eval(author_format);

        if (content_format == "") fetch.content = "";
        else fetch.content = eval(content_format).replace("\r\n" + fetch.author, ""); // optionally strip trailing author line

        // Guard added on the two .attr("content") extractions below: a
        // missing <meta> tag makes eval() return undefined, and the original
        // .replace() call then threw a TypeError.
        if (source_format == "") fetch.source = fetch.sourceName;
        else fetch.source = (eval(source_format) || "").replace("\r\n", ""); // media source

        if (desc_format == "") fetch.desc = fetch.title;
        else fetch.desc = (eval(desc_format) || "").replace("\r\n", ""); // summary

        let fetchAddSql = ['INSERT INTO newsinfo(newsUrl,sourceName,sourceEncoding,title,' +
            'keywords,author,publishDate,crawlTime,content,newsId) VALUES(?,?,?,?,?,?,?,?,?,null);'];
        let fetchAddSql_Params = [
            [fetch.url, fetch.sourceName, fetch.sourceEncoding,
            fetch.title, fetch.keywords, fetch.author, fetch.publish_date,
            fetch.crawltime.toFormat("YYYY-MM-DD HH24:MI:SS"), fetch.content]
        ];
        utils.startTransaction(pool, fetchAddSql, fetchAddSql_Params).then((rv) => {
            // insert succeeded — nothing further to do
        }).catch((err) => {
            console.error(err);
        });
    });
}
// Public API: only the crawl entry point is exported.
module.exports = { startSohuGet };