// Douban movies Top 250 crawler.
// Uses the superagent npm package — a standard HTTP client library for server-side requests.
import superagent from 'superagent'
import cheerio from 'cheerio'
import fs from 'fs'
import path from 'path'

/** A single scraped movie entry from the Douban Top 250 listing. */
interface Info {
  /** Movie title text. */
  title: string;
  /** Detail-page link for the movie. */
  url: string;
  /** Poster image URL. */
  pic: string;
}
/**
 * One crawl run's result: the scraped entries plus the crawl timestamp.
 * NOTE(review): name is a typo of "InfoResult"; kept as-is because other
 * code in this file references it by this spelling.
 */
interface InfoReuslt {
  /** Crawl timestamp in milliseconds since the epoch. */
  time: number;
  /** Movie entries scraped in this run. */
  data: Info[];
}
/**
 * Shape of the persisted JSON file: crawl results keyed by crawl timestamp.
 * NOTE(review): name breaks PascalCase convention; kept as-is because other
 * code in this file references it by this spelling.
 */
interface objJson {
  [timestamp: number]: Info[];
}

class Crawler {
  /** Target page: Douban Top 250 movie listing. */
  private url = 'https://movie.douban.com/top250';

  constructor() {
    // Constructors cannot be async, so the crawl is fired and forgotten here.
    // Attach a catch handler so a network or parse failure does not surface
    // as an unhandled promise rejection (the original awaited nothing here).
    this.initSpiderProcess().catch((err) => {
      console.error('crawl failed:', err);
    });
  }

  /** Fetch the listing page, extract movie info, and persist it to disk. */
  async initSpiderProcess() {
    // superagent GET request; `text` carries the raw HTML body.
    const { text: html } = await this.getRowHtml();
    const info = await this.getJsonInfo(html);
    this.getJsonContent(info);
  }

  /**
   * Merge one crawl result into ./data/url.json, keyed by crawl timestamp.
   * Existing entries from previous runs are preserved.
   */
  async getJsonContent(info: InfoReuslt) {
    const dataDir = path.resolve(__dirname, './data');
    const filePath = path.join(dataDir, 'url.json');
    // writeFileSync does not create missing directories — ensure ./data
    // exists first (the original crashed on a fresh checkout without it).
    if (!fs.existsSync(dataDir)) {
      fs.mkdirSync(dataDir, { recursive: true });
    }
    let fileContent: objJson = {};
    // Only parse the file if it already exists; otherwise start empty.
    if (fs.existsSync(filePath)) {
      fileContent = JSON.parse(fs.readFileSync(filePath, 'utf-8'));
    }
    fileContent[info.time] = info.data;
    fs.writeFileSync(filePath, JSON.stringify(fileContent));
  }

  /** GET the raw HTML of the target listing page. */
  async getRowHtml() {
    return superagent.get(this.url);
  }

  /**
   * Parse listing HTML into movie entries, stamped with the crawl time.
   * Missing attributes fall back to '' so every entry has all three fields.
   */
  getJsonInfo(html: string): InfoReuslt {
    // Load the HTML string into cheerio's in-memory DOM; `$` queries it.
    const $ = cheerio.load(html);
    const movies = $('.grid_view .item');
    const info: Info[] = [];
    for (let i = 0; i < movies.length; i++) {
      const movie = movies[i];
      info.push({
        title: $(movie).find('.info .title:first-child').text(),
        url: $(movie).find('.info a').attr('href') || '',
        pic: $(movie).find('.pic img').attr('src') || '',
      });
    }
    return {
      time: Date.now(),
      data: info,
    };
  }
}

const crawler = new Crawler();