import fs from 'fs';
import path from 'path';

import cheerio from 'cheerio';
import superagent from 'superagent';

/** One scraped film entry from the Douban top-250 list page. */
interface Target {
    name: string,    // film title block text, taken from the '.hd' element
    comment: string, // one-line quote text, taken from the '.inq' element
}

// NOTE(review): lowercase 'storeData' breaks the PascalCase convention used by
// the other interfaces; renaming would require touching its usage elsewhere in
// this file, so the name is left as-is.
/** Payload assembled per crawl: a millisecond timestamp plus the scraped films. */
interface storeData {
    time: number,
    data: Target[],
}

/** On-disk JSON shape: crawl timestamp (stringified as an object key) -> films. */
interface FileData {
    [propName: string]:  Target[],
}

/**
 * Scrapes the Douban Top 250 movie list and appends each crawl's result,
 * keyed by timestamp, to ./data/film.json.
 */
class Crawler {
    // Crawl target: the Douban Top 250 list page.
    private url = 'https://movie.douban.com/top250'

    /**
     * Fetch the raw HTML of the target page.
     * @returns the response body as a string
     */
    async getHTML(): Promise<string> {
        const requestHTML = await superagent.get(this.url)

        return requestHTML.text
    }

    /**
     * Extract film name/comment pairs from the page HTML.
     * @param html - full HTML document of the list page
     * @returns one Target per '.item' element on the page
     */
    async parseHTML(html: string): Promise<Target[]> {
        const recommentFilm: Target[] = []

        const $ = cheerio.load(html)

        // Each '.item' element is one film entry; '.each' (not '.map') makes
        // the side-effect-only iteration explicit. The original 'index >= 0'
        // guard was always true and has been dropped.
        $('.item').each((_index, item) => {
            const name: string = $(item).find('.hd').text()

            const comment = $(item).find('.inq').text()

            recommentFilm.push({ name, comment });
        })

        return recommentFilm;
    }

    /**
     * Append this crawl's result to ./data/film.json, keyed by timestamp,
     * preserving entries from earlier crawls.
     * @param data - the films scraped in this run
     */
    async storeToFile(data: Target[]) {
        const topFilmData: storeData = {
            time: new Date().getTime(),
            data: data,
        }

        const filePath = path.resolve('./data/film.json')

        // BUG FIX: the previous code parsed the existing file into
        // fileData[topFilmData.time] and then immediately overwrote that same
        // key, discarding all previously stored crawls on every run. The file
        // content IS the FileData map, so load it as the whole object.
        let fileData: FileData = {}
        if (fs.existsSync(filePath)) {
            fileData = JSON.parse(fs.readFileSync(filePath, "utf-8"));
        }

        fileData[topFilmData.time] = topFilmData.data

        // Ensure ./data exists; writeFileSync does not create directories.
        fs.mkdirSync(path.dirname(filePath), { recursive: true })
        fs.writeFileSync(filePath, JSON.stringify(fileData))
    }

    // NOTE(review): 'initCrawer' is likely a typo for 'initCrawler', but the
    // name is kept so any external callers are not broken.
    /** Run one full crawl cycle: fetch, parse, persist. */
    async initCrawer() {
        const html = await this.getHTML()

        const content = await this.parseHTML(html);

        // Await the write so initCrawer's promise resolves only when done.
        await this.storeToFile(content);
    }

    constructor() {
        // Constructors cannot await; surface failures instead of leaving an
        // unhandled rejection.
        this.initCrawer().catch((e: unknown) => {
            console.error('crawl failed:', e)
        })
    }
}

const crawler = new Crawler()
