const request = require('superagent')
const superagent = require('superagent-charset')(request)
const cheerio = require('cheerio')
const uuid = require('uuid')
const thread = require('async')
const {
    query
} = require('../tools/dbTools')

/**
 * 处理业务
 * @param {Object} ctx ctx对象
 */
/**
 * Business handler: walks the link pool, scrapes each listing page,
 * parses every house detail page on it, and bulk-inserts the rows.
 * @param {Object} ctx Koa context object; ctx.body is set to 'ok' when done
 */
const crawler = async (ctx) => {
    const linkPool = createLinkPool()

    for (let i = 0; i < linkPool.length; i++) {
        const listHtml = await requestPage(linkPool[i])
        const detailUrls = await getDetailUrls(listHtml)
        const builds = []
        for (let j = 0; j < detailUrls.length; ++j) {
            const detailHtml = await requestPage(detailUrls[j])
            const build = await dataHandler(detailHtml, detailUrls[j])
            // dataHandler yields [] for pages without a title; an empty row
            // would break the 15-column bulk insert below, so drop it here.
            if (build.length > 0) builds.push(build)
        }

        if (builds.length === 0) continue // nothing usable on this page

        const sql = 'insert into buildinfo(id,title,price,type,area,average,towards,floor,decoration,community,age,elevator,property,day,src) values ?'
        try {
            const rows = await query(sql, [builds])
            console.log(`爬取地址${linkPool[i]}的${rows.affectedRows}条数据已经入库`)
        } catch (error) {
            // Log and keep crawling the remaining pages; one failed insert
            // should not abort the whole run.
            console.log(`${linkPool[i]}的爬取写入操作失败了！失败原因：${error}`)
        }
    }
    ctx.body = 'ok'
}

/**
 * 向爬取地址发送请求
 * @param {String} link 地址
 */
/**
 * Fetch a page, decoding it with the site's charset.
 * Adapts superagent's callback API to a Promise.
 * @param {String} link URL to request
 * @returns {Promise<String>} resolves with the response body text
 */
const requestPage = async (link) => {
    return new Promise((resolve, reject) => {
        superagent
            .get(link)
            .charset()
            .end((err, res) => {
                if (err) {
                    console.log(err)
                    // Must return: otherwise the resolve below runs with
                    // `res` undefined and throws inside the callback.
                    return reject(err)
                }
                resolve(res.text)
            })
    })
}

/**
 * 提取页面中所有的房子的详情页地址
 * @param {String} html html内容
 */
/**
 * Extract every house detail-page URL from a listing page.
 * @param {String} html listing-page HTML
 * @returns {Promise<String[]>} absolute detail-page URLs
 */
const getDetailUrls = async (html) => {
    // The work is synchronous; the async function already returns a
    // Promise, so no explicit `new Promise` wrapper is needed.
    const $ = cheerio.load(html)
    const list = $('.houseList .list')
    const detailUrls = []
    for (let i = 0, len = list.length; i < len; ++i) {
        const url = $(list[i]).find('p.title>a').attr('href')
        if (!url) continue // entry without a link — skip it
        detailUrls.push('http://esf.zb.fang.com' + url)
    }
    return detailUrls
}

/**
 * 处理请求得到的页面内容字符串
 * @param {String} html 页面内容html
 */
/**
 * Parse one house detail page into a row for the buildinfo table.
 * @param {String} html detail-page HTML
 * @param {String} src the page's URL, stored as the row's source column
 * @returns {Promise<Array>} 15-element row matching the buildinfo insert,
 *   or [] when the page has no title (ad/placeholder page)
 */
const dataHandler = async (html, src) => {
    const $ = cheerio.load(html)

    // Strip single quotes (they would corrupt the SQL values) and trim.
    // Applied uniformly — the original skipped it for elevator/property.
    const clean = (text) => text.replace(/'/g, '').trim()

    const title = clean($('#lpname .floatl').text()) // 标题 / title
    // No title means no listing; return early. The original called
    // resolve([]) without returning, then issued a second (ignored) resolve.
    if (!title) return []

    const right = $('.tab-cont-right>.tr-line')
    const price = clean($(right[0]).find('.trl-item_top>.price_esf>i').text()) // 总价 / total price

    const type = clean($(right[1]).find('.trl-item1').eq(0).find('.tt').text()) // 户型 / layout
    const area = clean($(right[1]).find('.trl-item1').eq(1).find('.tt').text()) // 面积 / floor area
    const average = clean($(right[1]).find('.trl-item1').eq(2).find('.tt').text()) // 均价 / unit price

    const towards = clean($(right[2]).find('.trl-item1').eq(0).find('.tt').text()) // 朝向 / orientation
    const floor = clean($(right[2]).find('.trl-item1').eq(1).find('.tt').text()) // 楼层 / floor level
    const decoration = clean($(right[2]).find('.trl-item1').eq(2).find('.tt').text()) // 装修 / decoration

    const community = clean($(right[3]).find('.trl-item2 .rcont>a').eq(0).text()) // 小区 / community

    const info = $('.content-item>.cont')
    const age = clean($(info).find('.lab:contains("建筑年代")').next('.rcont').text()) // build year
    const elevator = clean($(info).find('.lab:contains("有无电梯")').next('.rcont').text()) // elevator
    const property = clean($(info).find('.lab:contains("产权性质")').next('.rcont').text()) // ownership type
    const day = clean($(info).find('.lab:contains("挂牌时间")').next('.rcont').text()) // listing date

    return [
        uuid.v1(),
        title,
        parseInt(price, 10),
        type,
        parseInt(area, 10),
        parseInt(average, 10),
        towards,
        floor,
        decoration,
        community,
        age,
        elevator.includes('有') ? 1 : 0, // 1 = has elevator, 0 = none/unknown
        property,
        day,
        src
    ]
}

/**
 * 创建地址池
 * @returns {Array} 地址池
 */
/**
 * Build the pool of listing-page URLs to crawl.
 * @param {Number} [pages=500] how many consecutive pages to enqueue
 * @param {String} [startUrl] base listing URL; the page index and a
 *   trailing slash are appended to it
 * @returns {Array<String>} listing-page URLs
 */
const createLinkPool = (pages = 500, startUrl = 'http://esf.zb.fang.com/house-a010260/i') => {
    // The original also built a parallel `brr` of 'pid_<i>' strings that
    // was never used — removed as dead code.
    const links = []
    for (let i = 0; i < pages; i++) {
        links.push(`${startUrl}${i}/`)
    }
    return links
}

// Only the crawler entry point is public; all other helpers stay module-private.
module.exports = {
    crawler
}