// UESTC (电子科技大学) news crawler — scrapes the university news listing
// page by page and stores each article via the Crawlab SDK.

// NOTE(review): `path` appears unused in this file — confirm before removing.
const path = require('path')

const crawlab = require('crawlab-sdk')

const axios = require('axios')
const cheerio = require('cheerio')
const schoolName = '电子科技大学'
const baseURL = 'https://news.uestc.edu.cn/'
// Example of a saved result — it must be a plain object.
// const result = {name: 'crawlab'};

// Entry point: crawl the first page of news category 42 and let `rule`
// follow the pagination from there.
;(async () => {
  try {
    // Await the crawl so a rejection surfaces here instead of becoming an
    // unhandled promise rejection (the original fired-and-forgot this call).
    await crawPage(`${baseURL}?n=UestcNews.Front.Category.Page&CatId=42&page=1`, rule)
  } catch (err) {
    console.error('crawl failed:', err)
    // Close the database connection on failure too, otherwise the
    // process will not exit.
    await crawlab.close()
    process.exitCode = 1
  }
})()

/**
 * Fetch `url`, parse the HTML with cheerio, and hand the parsed document
 * to the optional handler `func`.
 *
 * @param {string} url - absolute URL to fetch.
 * @param {(doc: *) => Promise<void>} [func] - async page handler (e.g. `rule`).
 * @returns {Promise<void>} resolves only after the handler has finished.
 * @throws whatever axios or the handler throws — errors now propagate to
 *         the caller instead of being dropped.
 */
async function crawPage(url, func) {
  const { data: html } = await axios(url)
  const $ = cheerio.load(html)
  // Await the handler: `func && func($)` discarded the async handler's
  // promise, so rejections inside `rule` were silently lost and callers
  // resumed before the page was actually processed.
  if (func) {
    await func($)
  }
}

/**
 * Page handler: save every article on the current listing page, then
 * follow the “next page” link until the last page is reached, at which
 * point the crawlab connection is closed so the process can exit.
 *
 * @param {*} $ - cheerio document of the current listing page.
 * @returns {Promise<void>}
 */
async function rule ($) {
  const entries = $('#Degas_news_list li')
  for (let i = 0; i < entries.length; i++) {
    const item = $(entries[i])
    const relHref = item.find('h3 a').attr('href')
    // Only build the absolute URL when the anchor exists: the original
    // concatenated unconditionally, producing the truthy string
    // `baseURL + "undefined"` that slipped past the guard below.
    const href = relHref ? baseURL + relHref : ''
    const title = item.find('h3 a').text().trim()
    const date = item.find('.time').text().trim()
    // Skip malformed entries (separator <li>s, items missing a link/date).
    if (href && title && date) {
      await crawlab.saveItem({
        time: date,
        url: href,
        title,
        schoolName
      })
    }
  }

  // Relative URL of the “next page” link, if one is present.
  const next = $('.move-page a').filter(function () {
    return $(this).text() === '下一页»';
  }).attr('href')
  // Total article count from the pagination footer → last page number
  // (the site lists 15 articles per page).
  const totalText = $('.pagination .total').find('span').text().replace(/\D/g, '')
  const lastPage = Math.ceil(parseInt(totalText, 10) / 15)

  if (next && next !== 'javascript:void(0);') {
    // Read the page number only after we know `next` exists — the
    // original called `next.replace(...)` unconditionally and threw a
    // TypeError on the final page, where the link is absent.
    const page = parseInt(next.replace(/.*page=/g, ''), 10)
    if (page !== lastPage) {
      // Await the recursive crawl so errors propagate and the pipeline
      // finishes in order instead of racing a floating promise.
      await crawPage(`${baseURL}${next}`, rule)
      return
    }
  }
  // No further pages: close the database connection, otherwise the
  // process will not terminate.
  await crawlab.close();
}